[ 502.279549] env[63371]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=63371) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 502.279956] env[63371]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=63371) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 502.279998] env[63371]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=63371) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 502.280343] env[63371]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 502.376146] env[63371]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=63371) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 502.386650] env[63371]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=63371) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 502.986229] env[63371]: INFO nova.virt.driver [None req-e752f2f9-f081-4bc0-b983-99830ce28746 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 503.057056] env[63371]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 503.057281] env[63371]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 503.057402] env[63371]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=63371) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 506.190484] env[63371]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-2a6237e7-0404-4ffb-ab84-c0079e70f1a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.207357] env[63371]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=63371) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 506.207511] env[63371]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-37e38ba8-f740-4613-b77c-07af2d7b2dfb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.240178] env[63371]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 00b12.
[ 506.240361] env[63371]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.183s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 506.240921] env[63371]: INFO nova.virt.vmwareapi.driver [None req-e752f2f9-f081-4bc0-b983-99830ce28746 None None] VMware vCenter version: 7.0.3
[ 506.244432] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c06f24-982a-4d35-94a9-f04cbd8e2a02 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.261908] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000ec047-4f09-4b25-8ea2-30d67c104316 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.268473] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d2123a8-6c85-4176-98db-f6c9413b1c19 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.275297] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e426e6e-4d62-46ce-9fed-1febe292277f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.288654] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5904f43f-838e-48d6-97b7-d923d37f9605 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.294680] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef52fa3b-6160-4e31-9e1d-a8b25533b851 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.324790] env[63371]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-6636aaa4-2142-4ff1-bbe1-e579c9f234b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 506.329757] env[63371]: DEBUG nova.virt.vmwareapi.driver [None req-e752f2f9-f081-4bc0-b983-99830ce28746 None None] Extension org.openstack.compute already exists. {{(pid=63371) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:226}}
[ 506.332474] env[63371]: INFO nova.compute.provider_config [None req-e752f2f9-f081-4bc0-b983-99830ce28746 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 506.835850] env[63371]: DEBUG nova.context [None req-e752f2f9-f081-4bc0-b983-99830ce28746 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),2164a72f-de1f-4c51-ba7d-fa987fc9734b(cell1) {{(pid=63371) load_cells /opt/stack/nova/nova/context.py:464}}
[ 506.838083] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 506.838316] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 506.838979] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 506.839421] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Acquiring lock "2164a72f-de1f-4c51-ba7d-fa987fc9734b" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 506.839612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Lock "2164a72f-de1f-4c51-ba7d-fa987fc9734b" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 506.840719] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Lock "2164a72f-de1f-4c51-ba7d-fa987fc9734b" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 506.861123] env[63371]: INFO dbcounter [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Registered counter for database nova_cell0
[ 506.869439] env[63371]: INFO dbcounter [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Registered counter for database nova_cell1
[ 506.872727] env[63371]: DEBUG oslo_db.sqlalchemy.engines [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63371) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 506.873102] env[63371]: DEBUG oslo_db.sqlalchemy.engines [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63371) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 506.877813] env[63371]: ERROR nova.db.main.api [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 506.877813] env[63371]: result = function(*args, **kwargs)
[ 506.877813] env[63371]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 506.877813] env[63371]: return func(*args, **kwargs)
[ 506.877813] env[63371]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 506.877813] env[63371]: result = fn(*args, **kwargs)
[ 506.877813] env[63371]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 506.877813] env[63371]: return f(*args, **kwargs)
[ 506.877813] env[63371]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 506.877813] env[63371]: return db.service_get_minimum_version(context, binaries)
[ 506.877813] env[63371]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 506.877813] env[63371]: _check_db_access()
[ 506.877813] env[63371]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 506.877813] env[63371]: stacktrace = ''.join(traceback.format_stack())
[ 506.877813] env[63371]:
[ 506.878801] env[63371]: ERROR nova.db.main.api [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 506.878801] env[63371]: result = function(*args, **kwargs)
[ 506.878801] env[63371]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 506.878801] env[63371]: return func(*args, **kwargs)
[ 506.878801] env[63371]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 506.878801] env[63371]: result = fn(*args, **kwargs)
[ 506.878801] env[63371]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 506.878801] env[63371]: return f(*args, **kwargs)
[ 506.878801] env[63371]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 506.878801] env[63371]: return db.service_get_minimum_version(context, binaries)
[ 506.878801] env[63371]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 506.878801] env[63371]: _check_db_access()
[ 506.878801] env[63371]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 506.878801] env[63371]: stacktrace = ''.join(traceback.format_stack())
[ 506.878801] env[63371]:
[ 506.879332] env[63371]: WARNING nova.objects.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 506.879332] env[63371]: WARNING nova.objects.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Failed to get minimum service version for cell 2164a72f-de1f-4c51-ba7d-fa987fc9734b
[ 506.879727] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Acquiring lock "singleton_lock" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 506.879886] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Acquired lock "singleton_lock" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [
506.880142] env[63371]: DEBUG oslo_concurrency.lockutils [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Releasing lock "singleton_lock" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 506.880492] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Full set of CONF: {{(pid=63371) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 506.880640] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ******************************************************************************** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 506.880768] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Configuration options gathered from: {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 506.880902] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 506.881102] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 506.881233] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ================================================================================ {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 506.881446] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] allow_resize_to_same_host = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.881617] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] arq_binding_timeout = 300 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.881749] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] backdoor_port = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.881874] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] backdoor_socket = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.882067] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] block_device_allocate_retries = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.882242] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] block_device_allocate_retries_interval = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.882414] env[63371]: DEBUG 
oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cert = self.pem {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.882582] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.882748] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute_monitors = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.882913] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] config_dir = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.883094] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] config_drive_format = iso9660 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.883232] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.883392] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] config_source = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.883557] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] console_host = devstack {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.883717] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] control_exchange = nova {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.883872] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cpu_allocation_ratio = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.884038] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] daemon = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.884205] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] debug = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.884360] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] default_access_ip_network_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.884524] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] default_availability_zone = nova {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.884676] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] default_ephemeral_format = 
None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.884832] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] default_green_pool_size = 1000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.885076] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.885242] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] default_schedule_zone = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.885398] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] disk_allocation_ratio = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.885557] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] enable_new_services = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.885730] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] enabled_apis = ['osapi_compute'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.885889] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] enabled_ssl_apis = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.886062] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] flat_injected = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.886222] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] force_config_drive = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.886377] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] force_raw_images = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.886543] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] graceful_shutdown_timeout = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.886701] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] heal_instance_info_cache_interval = 60 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.886916] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] host = cpu-1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.887110] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] initial_cpu_allocation_ratio = 4.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.887275] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] initial_disk_allocation_ratio = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.887438] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] initial_ram_allocation_ratio = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.887653] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.887816] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_build_timeout = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.887971] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_delete_interval = 300 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.888154] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_format = [instance: %(uuid)s] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.888322] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_name_template = instance-%08x {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.888480] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_usage_audit = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.888648] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_usage_audit_period = month {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.888808] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.888970] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] instances_path = /opt/stack/data/nova/instances {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.889146] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] internal_service_availability_zone = internal {{(pid=63371) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.889302] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] key = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.889457] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] live_migration_retry_count = 30 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.889628] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_color = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.889787] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_config_append = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.889949] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891068] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_dir = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891068] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891068] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_options = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891068] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_rotate_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891068] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_rotate_interval_type = days {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891068] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] log_rotation_type = none {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891285] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891285] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891440] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891649] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.891780] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892032] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] long_rpc_timeout = 1800 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892129] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] max_concurrent_builds = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892309] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] max_concurrent_live_migrations = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892520] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] max_concurrent_snapshots = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892711] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] max_local_block_devices = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892899] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] max_logfile_count = 30 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.892962] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] max_logfile_size_mb = 200 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.893147] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] maximum_instance_delete_attempts = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.893320] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metadata_listen = 0.0.0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.893608] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metadata_listen_port = 8775 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.893685] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metadata_workers = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.893817] env[63371]: DEBUG oslo_service.service 
[None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] migrate_max_retries = -1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.893985] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] mkisofs_cmd = genisoimage {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.894323] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] my_block_storage_ip = 10.180.1.21 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.894399] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] my_ip = 10.180.1.21 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.894534] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] network_allocate_retries = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.894712] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.894893] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] osapi_compute_listen = 0.0.0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.895084] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] osapi_compute_listen_port = 8774 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.895277] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] osapi_compute_unique_server_name_scope = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.895416] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] osapi_compute_workers = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.895647] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] password_length = 12 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.895939] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] periodic_enable = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.896118] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] periodic_fuzzy_delay = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.896370] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] pointer_model = usbtablet {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.896607] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] preallocate_images = none {{(pid=63371) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.896852] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] publish_errors = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.897045] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] pybasedir = /opt/stack/nova {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.897274] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ram_allocation_ratio = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.897486] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] rate_limit_burst = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.897721] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] rate_limit_except_level = CRITICAL {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.897952] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] rate_limit_interval = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.898198] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] reboot_timeout = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.898412] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] reclaim_instance_interval = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.898636] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] record = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.898876] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] reimage_timeout_per_gb = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.899118] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] report_interval = 120 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.899300] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] rescue_timeout = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.899525] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] reserved_host_cpus = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.899693] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] reserved_host_disk_mb = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.899955] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc 
None None] reserved_host_memory_mb = 512 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.900130] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] reserved_huge_pages = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.900328] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] resize_confirm_window = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.900695] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] resize_fs_using_block_device = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.900754] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] resume_guests_state_on_host_boot = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.900986] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.901255] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] rpc_response_timeout = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.901407] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] run_external_periodic_tasks = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.901763] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] running_deleted_instance_action = reap {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.901847] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] running_deleted_instance_poll_interval = 1800 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.901992] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] running_deleted_instance_timeout = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.902219] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler_instance_sync_interval = 120 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.902440] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_down_time = 720 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.902652] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] servicegroup_driver = db {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.902815] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] shell_completion = None {{(pid=63371) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.903046] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] shelved_offload_time = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.903346] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] shelved_poll_interval = 3600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.903465] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] shutdown_timeout = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.903694] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] source_is_ipv6 = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904686] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ssl_only = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904686] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904686] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] sync_power_state_interval = 600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904686] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] sync_power_state_pool_size = 1000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904686] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] syslog_log_facility = LOG_USER {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904879] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] tempdir = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.904951] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] timeout_nbd = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.905130] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] transport_url = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.905294] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] update_resources_interval = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.905456] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_cow_images = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.905614] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_eventlog = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.905771] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_journal = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.905928] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_json = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.906094] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_rootwrap_daemon = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.906250] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_stderr = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.906404] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] use_syslog = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.906560] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vcpu_pin_set = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.906726] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plugging_is_fatal = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.906889] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plugging_timeout = 300 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.907104] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] virt_mkfs = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.907274] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] volume_usage_poll_interval = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.907434] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] watch_log_file = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.907602] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] web = /usr/share/spice-html5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 506.907783] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.907946] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.908160] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_brick.wait_mpath_device_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.908395] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_concurrency.disable_process_locking = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.908982] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.909196] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.909373] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.909552] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_metrics.metrics_process_name = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.909725] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.909890] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.910083] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.auth_strategy = keystone {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.910256] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.compute_link_prefix = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.910471] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.910737] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.dhcp_domain = novalocal {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.910988] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.enable_instance_password = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.911262] 
env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.glance_link_prefix = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.911521] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.911780] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.instance_list_cells_batch_strategy = distributed {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.912037] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.instance_list_per_project_cells = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.912294] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.list_records_by_skipping_down_cells = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.912543] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.local_metadata_per_cell = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.912797] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.max_limit = 1000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.913062] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.metadata_cache_expiration = 15 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.913324] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.neutron_default_tenant_id = default {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.913583] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.response_validation = warn {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.913837] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.use_neutron_default_nets = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.914107] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.914362] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_dynamic_failure_fatal = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.914611] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.914889] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_dynamic_ssl_certfile = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.915159] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_dynamic_targets = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.915414] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_jsonfile_path = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.915681] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api.vendordata_providers = ['StaticJSON'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.915951] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.backend = dogpile.cache.memcached {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.916255] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.backend_argument = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.916474] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.backend_expiration_time = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.916731] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.config_prefix = cache.oslo {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.916982] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.dead_timeout = 60.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.917239] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.debug_cache_backend = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.917475] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.enable_retry_client = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.917721] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.enable_socket_keepalive = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.917973] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.enabled = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.918237] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.enforce_fips_mode = False {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.918486] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.expiration_time = 600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.918732] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.hashclient_retry_attempts = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.918977] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.hashclient_retry_delay = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.919236] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_dead_retry = 300 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.919479] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_password = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.919727] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.919971] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.920228] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_pool_maxsize = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.920479] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_pool_unused_timeout = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.920726] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_sasl_enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.920983] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_servers = ['localhost:11211'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.921244] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_socket_timeout = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.921486] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.memcache_username = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.921736] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.proxies = [] {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.921982] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_db = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.922235] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_password = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.922492] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_sentinel_service_name = mymaster {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.922750] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.923010] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_server = localhost:6379 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.923255] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_socket_timeout = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.923498] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.redis_username = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.923746] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.retry_attempts = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.923994] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.retry_delay = 0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.924253] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.socket_keepalive_count = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.924500] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.socket_keepalive_idle = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.924751] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.socket_keepalive_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.925023] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.tls_allowed_ciphers = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.925269] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.tls_cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.925506] 
env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.tls_certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.925747] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.tls_enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.925984] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cache.tls_keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.926241] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.926497] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.auth_type = password {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.926740] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.926993] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.catalog_info = volumev3::publicURL {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.927244] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.927487] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.927736] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.cross_az_attach = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.927978] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.debug = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.928237] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.endpoint_template = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.928481] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.http_retries = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.928727] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.928964] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.keyfile = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.929228] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.os_region_name = RegionOne {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.929470] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.929711] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cinder.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.929962] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.930216] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.cpu_dedicated_set = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.930468] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.cpu_shared_set = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.930717] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.image_type_exclude_list = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.930959] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.live_migration_wait_for_vif_plug = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.931217] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.max_concurrent_disk_ops = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.931463] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.max_disk_devices_to_attach = -1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.931706] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.931958] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.932219] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.resource_provider_association_refresh = 300 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.932464] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] 
compute.sharing_providers_max_uuids_per_request = 200 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.932708] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.shutdown_retry_interval = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.932969] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.933243] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] conductor.workers = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.933502] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] console.allowed_origins = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.933745] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] console.ssl_ciphers = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.933991] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] console.ssl_minimum_version = default {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.934252] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] consoleauth.enforce_session_timeout = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.934501] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] consoleauth.token_ttl = 600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.934750] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.935048] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.935266] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.935509] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.935750] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.936035] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.endpoint_override = None 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.936281] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.936521] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.936761] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.936998] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.937251] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.937493] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.937733] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.937980] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.service_type = accelerator {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.938235] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.938474] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.938719] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.938957] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.939230] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.939471] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] cyborg.version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
506.939736] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.backend = sqlalchemy {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.939983] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.connection = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.940252] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.connection_debug = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.940516] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.connection_parameters = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.940768] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.connection_recycle_time = 3600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.941019] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.connection_trace = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.941268] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.db_inc_retry_interval = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.941518] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.db_max_retries = 20 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.941761] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.db_max_retry_interval = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.942012] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.db_retry_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.942261] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.max_overflow = 50 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.942505] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.max_pool_size = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.942748] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.max_retries = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.942996] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.mysql_sql_mode = TRADITIONAL {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.943249] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.mysql_wsrep_sync_wait = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.943491] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.pool_timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.943739] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.retry_interval = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.943980] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.slave_connection = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.944238] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.sqlite_synchronous = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.944483] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] database.use_db_reconnect = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.944747] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.backend = sqlalchemy {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.945009] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.connection = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.945278] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.connection_debug = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.945534] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.connection_parameters = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.945780] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.connection_recycle_time = 3600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.946039] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.connection_trace = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.946296] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.db_inc_retry_interval = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.946545] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.db_max_retries = 20 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.946795] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.db_max_retry_interval = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.947067] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.db_retry_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.947317] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.max_overflow = 50 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.947564] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.max_pool_size = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.947810] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.max_retries = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.948073] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.948319] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.mysql_wsrep_sync_wait = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.948558] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.pool_timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.948800] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.retry_interval = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.949055] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.slave_connection = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.949303] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] api_database.sqlite_synchronous = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.949556] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] devices.enabled_mdev_types = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.949814] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.950084] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ephemeral_storage_encryption.default_format = luks {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.950339] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ephemeral_storage_encryption.enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.950585] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ephemeral_storage_encryption.key_size = 512 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.950838] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.api_servers = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.951097] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.951348] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.951590] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.951834] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.952095] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.952340] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.debug = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.952588] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.default_trusted_certificate_ids = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.952831] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.enable_certificate_validation = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.953092] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.enable_rbd_download = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.953336] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.endpoint_override = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.953579] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.953818] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.keyfile = None 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.954071] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.954316] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.954563] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.num_retries = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.954814] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.rbd_ceph_conf = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.955075] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.rbd_connect_timeout = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.955356] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.rbd_pool = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.955615] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.rbd_user = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.955857] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.956115] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.956362] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.956618] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.service_type = image {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.956864] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.957122] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.957371] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.957611] env[63371]: DEBUG 
oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.957871] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.958129] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.verify_glance_signatures = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.958375] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] glance.version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.958625] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] guestfs.debug = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.958869] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] mks.enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.959333] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.959596] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] image_cache.manager_interval = 2400 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.959850] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] image_cache.precache_concurrency = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.960118] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] image_cache.remove_unused_base_images = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.960390] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.960640] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.960894] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] image_cache.subdirectory_name = _base {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.961160] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.api_max_retries = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.961404] env[63371]: DEBUG 
oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.api_retry_interval = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.961646] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.961887] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.auth_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.962137] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.962375] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.962620] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.962864] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.conductor_group = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.963122] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.963368] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.963610] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.endpoint_override = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.963852] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.964108] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.964355] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.964596] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.964841] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.peer_list = [] {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.965098] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.965351] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.965605] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.serial_console_state_timeout = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.965856] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.966121] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.service_type = baremetal {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.966368] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.shard = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.966617] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.966856] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.967136] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.967381] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.967642] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.967879] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ironic.version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.968156] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.968412] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] key_manager.fixed_key = **** {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.968680] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.968920] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.barbican_api_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.969175] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.barbican_endpoint = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.969430] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.barbican_endpoint_type = public {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.969672] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.barbican_region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.969914] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.970164] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.970427] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.970675] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.970922] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.971178] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.number_of_retries = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.971429] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.retry_delay = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.971676] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.send_service_user_token = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.971917] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.split_loggers = False {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.972169] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.972417] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.verify_ssl = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.972660] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican.verify_ssl_path = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.972903] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.973159] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.auth_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.973399] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.973640] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.973886] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.974144] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.974390] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.974635] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.974871] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] barbican_service_user.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.975132] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.approle_role_id = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.975379] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.approle_secret_id = **** {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.975656] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.kv_mountpoint = secret {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.975901] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.kv_path = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.976155] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.kv_version = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.976396] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.namespace = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.976638] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.root_token_id = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.976874] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.ssl_ca_crt_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.977134] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.timeout = 60.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.977380] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.use_ssl = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.977633] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.977879] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.978133] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.978385] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.978630] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.978871] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.979126] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.endpoint_override = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.979375] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.979617] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.979859] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.980121] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.980399] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.980580] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.980750] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.980923] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.service_type = identity {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.981104] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.981269] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.981466] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.981628] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.981808] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.981968] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] keystone.version = None 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.982184] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.connection_uri = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.982352] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_mode = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.982530] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_model_extra_flags = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.982691] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_models = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.982862] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_power_governor_high = performance {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.983042] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_power_governor_low = powersave {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.983212] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_power_management = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.983386] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.983555] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.device_detach_attempts = 8 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.983716] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.device_detach_timeout = 20 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.983881] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.disk_cachemodes = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.984063] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.disk_prefix = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.984232] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.enabled_perf_events = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.984393] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.file_backed_memory = 0 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.984557] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.gid_maps = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.984714] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.hw_disk_discard = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.984868] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.hw_machine_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.985045] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_rbd_ceph_conf = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.985215] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.985380] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.985550] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_rbd_glance_store_name = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.985716] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_rbd_pool = rbd {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.985880] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_type = default {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986045] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.images_volume_group = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986207] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.inject_key = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986364] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.inject_partition = -2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986521] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.inject_password = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986679] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.iscsi_iface = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986834] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.iser_use_multipath = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.986993] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_bandwidth = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.987164] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_completion_timeout = 800 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.987323] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_downtime = 500 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.987480] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_downtime_delay = 75 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.987637] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_downtime_steps = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.987792] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_inbound_addr = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.987949] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_permit_auto_converge = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.988120] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_permit_post_copy = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.988282] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_scheme = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.988454] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_timeout_action = abort {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.988616] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_tunnelled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.988772] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.live_migration_uri = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.988930] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] 
libvirt.live_migration_with_native_tls = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.989105] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.max_queues = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.989270] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.mem_stats_period_seconds = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.989494] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.989660] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.nfs_mount_options = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.989966] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.990155] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.num_aoe_discover_tries = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.990319] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.num_iser_scan_tries = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.990504] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.num_memory_encrypted_guests = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.990674] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.num_nvme_discover_tries = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.990838] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.num_pcie_ports = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.991009] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.num_volume_scan_tries = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.991179] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.pmem_namespaces = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.991345] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.quobyte_client_cfg = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.991667] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] 
libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.991843] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rbd_connect_timeout = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.992027] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.992195] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.992357] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rbd_secret_uuid = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.992519] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rbd_user = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.992678] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.realtime_scheduler_priority = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.992847] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.remote_filesystem_transport = ssh {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.993012] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rescue_image_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.993175] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rescue_kernel_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.993329] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rescue_ramdisk_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.993493] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rng_dev_path = /dev/urandom {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.993651] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.rx_queue_size = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.993814] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.smbfs_mount_options = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.994122] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] 
libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.994299] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.snapshot_compression = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.994462] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.snapshot_image_format = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.994683] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.994847] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.sparse_logical_volumes = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995015] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.swtpm_enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995189] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.swtpm_group = tss {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995353] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.swtpm_user = tss {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995520] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.sysinfo_serial = unique {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995675] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.tb_cache_size = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995829] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.tx_queue_size = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.995987] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.uid_maps = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.996160] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.use_virtio_for_bridges = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.996328] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.virt_type = kvm {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.996493] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.volume_clear = zero 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.996652] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.volume_clear_size = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.996813] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.volume_use_multipath = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.996968] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_cache_path = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.997145] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.997307] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_mount_group = qemu {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.997465] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_mount_opts = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.997663] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.997988] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.998190] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.vzstorage_mount_user = stack {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.998357] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.998536] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.998711] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.auth_type = password {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.998871] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.999041] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.certfile = None 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.999207] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.999363] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.999523] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.999692] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.default_floating_pool = public {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 506.999853] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.endpoint_override = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.000019] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.extension_sync_interval = 600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.000187] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.http_retries = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.000350] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.000531] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.000700] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.000881] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.metadata_proxy_shared_secret = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.001051] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.001225] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.ovs_bridge = br-int {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.001412] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.physnets = [] {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.001599] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.region_name = RegionOne {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.001765] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.001933] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.service_metadata_proxy = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.002107] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.002275] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.service_type = network {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.002435] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.002593] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.002748] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.002905] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.003093] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.003257] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] neutron.version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.003428] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] notifications.bdms_in_notifications = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.003606] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] notifications.default_level = INFO {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.003779] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] notifications.notification_format = unversioned {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.003940] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] notifications.notify_on_state_change = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.004126] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.004300] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] pci.alias = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.004466] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] pci.device_spec = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.004627] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] pci.report_in_placement = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.004797] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.004964] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.auth_type = password {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.005146] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.auth_url = http://10.180.1.21/identity {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.005305] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.005461] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.005622] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.005779] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.005935] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.006102] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.default_domain_id = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.006261] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.default_domain_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.006418] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.domain_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.006573] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.domain_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.006729] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.endpoint_override = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.006887] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.007053] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.007215] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.007370] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.007535] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.password = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.007691] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.project_domain_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.007855] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.project_domain_name = Default {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.008028] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.project_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.008204] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.project_name = service {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.008370] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.region_name = RegionOne {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.008535] 
env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.008693] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.008859] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.service_type = placement {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009028] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009192] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009351] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009513] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.system_scope = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009668] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009824] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.trust_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.009979] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.user_domain_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.010165] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.user_domain_name = Default {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.010328] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.user_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.010528] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.username = nova {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.010715] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.010878] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] placement.version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.011074] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.cores = 20 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.011245] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.count_usage_from_placement = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.011438] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.011619] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.injected_file_content_bytes = 10240 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.011785] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.injected_file_path_length = 255 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.011948] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.injected_files = 5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.012125] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.instances = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.012290] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.key_pairs = 100 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.012453] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.metadata_items = 128 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.012617] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.ram = 51200 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.012778] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.recheck_quota = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.012942] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.server_group_members = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.013116] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] quota.server_groups = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.013291] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.013451] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.013613] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.image_metadata_prefilter = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.013769] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.013926] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.max_attempts = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.014097] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.max_placement_results = 1000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.014259] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.014416] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.query_placement_for_image_type_support = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.014574] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.014744] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] scheduler.workers = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.014915] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.015094] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.015273] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.015438] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.015605] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.015767] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.015926] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.016118] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.016287] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.host_subset_size = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.016452] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.016610] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.image_properties_default_architecture = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.016767] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.016925] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.isolated_hosts = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.017094] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.isolated_images = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.017255] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.max_instances_per_host = 50 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.017411] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.017570] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.017727] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.pci_in_placement = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.017882] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018049] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018208] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018365] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018522] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018676] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018830] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.track_instance_changes = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.018998] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.019178] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metrics.required = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.019335] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metrics.weight_multiplier = 1.0 
{{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.019492] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metrics.weight_of_unavailable = -10000.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.019650] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] metrics.weight_setting = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.019956] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.020141] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] serial_console.enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.020314] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] serial_console.port_range = 10000:20000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.020508] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.020677] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.020844] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] serial_console.serialproxy_port = 6083 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.021013] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.021195] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.auth_type = password {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.021363] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.021533] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.021694] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.021849] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.insecure = False {{(pid=63371) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.022007] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.022184] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.send_service_user_token = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.022345] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.022500] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] service_user.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.022678] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.agent_enabled = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.022836] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.023163] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.023367] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.html5proxy_host = 0.0.0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.023539] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.html5proxy_port = 6082 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.023696] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.image_compression = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.023849] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.jpeg_compression = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024009] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.playback_compression = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024176] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.require_secure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024339] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.server_listen = 127.0.0.1 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024511] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024679] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.streaming_mode = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024835] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] spice.zlib_compression = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.024994] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] upgrade_levels.baseapi = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.025173] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] upgrade_levels.compute = auto {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.025330] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] upgrade_levels.conductor = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.025482] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] upgrade_levels.scheduler = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.025647] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.025802] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.auth_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.025957] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.026123] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.026282] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.026438] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.insecure = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.026593] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.keyfile = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.026747] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.026898] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vendordata_dynamic_auth.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.027074] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.api_retry_count = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.027232] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.ca_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.027397] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.cache_prefix = devstack-image-cache {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.027563] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.cluster_name = testcl1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.027720] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.connection_pool_size = 10 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.027872] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.console_delay_seconds = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.028046] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.datastore_regex = ^datastore.* {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.028255] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.028423] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.host_password = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.028589] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.host_port = 443 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.028751] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.host_username = administrator@vsphere.local {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.028912] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.insecure = True {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.029081] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.integration_bridge = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.029244] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.maximum_objects = 100 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.029400] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.pbm_default_policy = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.029559] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.pbm_enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.029711] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.pbm_wsdl_location = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.029873] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.030037] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.serial_port_proxy_uri = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.030195] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.serial_port_service_uri = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.030381] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.task_poll_interval = 0.5 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.030555] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.use_linked_clone = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.030724] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.vnc_keymap = en-us {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.030887] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.vnc_port = 5900 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.031059] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vmware.vnc_port_total = 10000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.031240] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.auth_schemes = ['none'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.031418] 
env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.enabled = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.031740] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.031928] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.032121] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.novncproxy_port = 6080 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.032309] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.server_listen = 127.0.0.1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.032488] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.032647] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.vencrypt_ca_certs = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.032801] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.vencrypt_client_cert = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.032953] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vnc.vencrypt_client_key = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.033144] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.033305] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.disable_deep_image_inspection = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.033463] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.disable_fallback_pcpu_query = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.033621] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.disable_group_policy_check_upcall = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.033776] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.033932] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.disable_rootwrap = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.034103] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.enable_numa_live_migration = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.034263] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.034420] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.034578] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.handle_virt_lifecycle_events = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.034734] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.libvirt_disable_apic = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.034890] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.never_download_image_if_on_rbd = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035476] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035476] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035476] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035608] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035680] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035803] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.035959] 
env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.036142] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.036285] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.036462] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.036627] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.client_socket_timeout = 900 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.036786] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.default_pool_size = 1000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.036945] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.keep_alive = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.037120] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.max_header_line = 16384 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.037281] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.secure_proxy_ssl_header = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.037439] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.ssl_ca_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.037600] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.ssl_cert_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.037751] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.ssl_key_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.037908] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.tcp_keepidle = 600 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.038090] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.038257] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] zvm.ca_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.038409] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] zvm.cloud_connector_url = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.038696] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.038866] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] zvm.reachable_timeout = 300 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.039054] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.enforce_new_defaults = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.039429] env[63371]: WARNING oslo_config.cfg [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
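The long run of DEBUG records above and below comes from oslo.config's log_opt_values(), which walks every registered option group at service start-up and logs each option as "group.option = value" (secrets are masked as ****). A minimal sketch, assuming only stock oslo.config and using illustrative defaults modelled on the [oslo_policy] values logged nearby (not Nova's actual registration code), of how such a group is declared, parsed, and then dumped in exactly this form:

from oslo_config import cfg
import logging

CONF = cfg.CONF

# Illustrative option group; names mirror the oslo_policy values seen in the
# log, but the declarations here are an assumption for the sketch only.
_policy_opts = [
    cfg.BoolOpt('enforce_new_defaults', default=True,
                help='Use the new RBAC default policies.'),
    cfg.StrOpt('policy_file', default='policy.yaml',
               help='Relative path to the policy file.'),
]
CONF.register_opts(_policy_opts, group='oslo_policy')

if __name__ == '__main__':
    # A real service would pass --config-file /etc/nova/nova.conf here.
    CONF([], project='demo')
    logging.basicConfig(level=logging.DEBUG)
    # log_opt_values() emits the "group.option = value" records shown in this log.
    CONF.log_opt_values(logging.getLogger(__name__), logging.DEBUG)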
[ 507.039618] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.enforce_scope = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.039791] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.policy_default_rule = default {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.039966] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.040151] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.policy_file = policy.yaml {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.040326] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.040514] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.040680] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.040836] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.040994] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.041176] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_policy.remote_timeout = 60.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.041351] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.041568] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.041754] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.connection_string = messaging:// {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.041920] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.enabled = False {{(pid=63371) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.042102] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.es_doc_type = notification {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.042266] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.es_scroll_size = 10000 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.042429] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.es_scroll_time = 2m {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.042591] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.filter_error_trace = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.042756] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.hmac_keys = **** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.042919] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.sentinel_service_name = mymaster {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.043092] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.socket_timeout = 0.1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.043254] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.trace_requests = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.043408] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler.trace_sqlalchemy = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.043585] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler_jaeger.process_tags = {} {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.043742] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler_jaeger.service_name_prefix = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.043899] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] profiler_otlp.service_name_prefix = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.044072] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] remote_debug.host = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.044232] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] remote_debug.port = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.044407] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.044565] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.044722] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.044876] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045041] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045201] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045359] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045516] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045673] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045838] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.hostname = devstack {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.045995] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.046175] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.046339] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.046503] 
env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.046699] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.046918] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.047118] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.047331] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.047508] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.047674] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.047837] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.047999] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.048172] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.048336] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.048491] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.048651] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.048802] env[63371]: DEBUG oslo_service.service [None 
req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.048959] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.049165] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.049398] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.ssl = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.049584] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.049800] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.049971] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.050156] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.050327] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.ssl_version = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.050519] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.050732] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.050983] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_notifications.retry = -1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.051245] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.051445] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_messaging_notifications.transport_url = **** {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.051623] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.auth_section = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.051786] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.auth_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.051940] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.cafile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.052107] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.certfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.052268] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.collect_timing = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.052424] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.connect_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.052580] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.connect_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.052734] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.endpoint_id = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.052898] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.endpoint_interface = publicURL {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053062] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.endpoint_override = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053219] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.endpoint_region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053371] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.endpoint_service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053526] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.endpoint_service_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053683] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.insecure = False {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053834] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.keyfile = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.053985] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.max_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.054151] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.min_version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.054302] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.region_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.054452] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.retriable_status_codes = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.054606] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.service_name = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.054755] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.service_type = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.054909] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.split_loggers = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.055071] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.status_code_retries = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.055227] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.status_code_retry_delay = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.055377] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.timeout = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.055529] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.valid_interfaces = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.055680] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_limit.version = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.055892] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_reports.file_event_handler = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.056117] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_reports.file_event_handler_interval = 1 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.056311] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] oslo_reports.log_dir = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.056485] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.056643] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_linux_bridge_privileged.group = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.056798] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.056959] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.057135] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.057293] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_linux_bridge_privileged.user = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.057458] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.057612] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_ovs_privileged.group = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.057763] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_ovs_privileged.helper_command = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.057923] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.058108] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.058272] env[63371]: DEBUG 
oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] vif_plug_ovs_privileged.user = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.058437] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.flat_interface = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.058614] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.058784] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.058950] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.059130] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.059293] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.059455] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.059613] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_linux_bridge.vlan_interface = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.059785] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.059952] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.isolate_vif = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.060129] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.060291] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.060481] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.060651] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.ovsdb_interface = native {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.060812] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] os_vif_ovs.per_port_bridge = False {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.060981] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] privsep_osbrick.capabilities = [21] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.061154] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] privsep_osbrick.group = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.061310] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] privsep_osbrick.helper_command = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.061514] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.061688] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] privsep_osbrick.thread_pool_size = 8 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.061848] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] privsep_osbrick.user = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062029] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062196] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] nova_sys_admin.group = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062354] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] nova_sys_admin.helper_command = None {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062518] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062677] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] nova_sys_admin.thread_pool_size = 8 {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062833] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] nova_sys_admin.user = None {{(pid=63371) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 507.062963] env[63371]: DEBUG oslo_service.service [None req-afd9d5e1-2d06-4e58-af10-c22216f14dfc None None] ******************************************************************************** {{(pid=63371) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 507.063482] env[63371]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 507.567050] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Getting list of instances from cluster (obj){ [ 507.567050] env[63371]: value = "domain-c8" [ 507.567050] env[63371]: _type = "ClusterComputeResource" [ 507.567050] env[63371]: } {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 507.568097] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af89b2da-9a3d-4bcc-81d5-2638fe3d4f23 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 507.577550] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Got total of 0 instances {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 507.578092] env[63371]: WARNING nova.virt.vmwareapi.driver [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 507.578580] env[63371]: INFO nova.virt.node [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Generated node identity c079ebb1-2fa2-4df9-bdab-118e305653c1 [ 507.578818] env[63371]: INFO nova.virt.node [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Wrote node identity c079ebb1-2fa2-4df9-bdab-118e305653c1 to /opt/stack/data/n-cpu-1/compute_id [ 508.081579] env[63371]: WARNING nova.compute.manager [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Compute nodes ['c079ebb1-2fa2-4df9-bdab-118e305653c1'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 509.089091] env[63371]: INFO nova.compute.manager [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 510.095224] env[63371]: WARNING nova.compute.manager [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. 
[ 510.095544] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 510.095661] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 510.095830] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 510.095985] env[63371]: DEBUG nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 510.097268] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcb483aa-420c-4b1b-ad5a-646c0a9ce0d5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.105428] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373b3be6-c67e-4e4e-9f91-549470fad113 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.120228] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-266fd036-172c-4dd6-a024-7b5961631d30 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.126385] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95246de-9643-4b71-8ef7-c3ff78047fb2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 510.154247] env[63371]: DEBUG nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181377MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 510.154391] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 510.154582] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 510.656789] env[63371]: WARNING 
nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] No compute node record for cpu-1:c079ebb1-2fa2-4df9-bdab-118e305653c1: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host c079ebb1-2fa2-4df9-bdab-118e305653c1 could not be found. [ 511.160380] env[63371]: INFO nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: c079ebb1-2fa2-4df9-bdab-118e305653c1 [ 512.668618] env[63371]: DEBUG nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 512.669048] env[63371]: DEBUG nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 512.858297] env[63371]: INFO nova.scheduler.client.report [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] [req-9ec11389-907b-4a4a-885b-3eaeea03e3f4] Created resource provider record via placement API for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. [ 512.876433] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721d650d-e8a5-46b0-a768-83dad0a9e485 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.884293] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67cffddb-b6ee-49fe-83a6-746af8bd3c43 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.914177] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df68eb08-2ae0-4f40-bd64-03c1e065bf3f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.921652] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9fe4fa-499f-4f58-86f1-1817e42dbb08 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.935784] env[63371]: DEBUG nova.compute.provider_tree [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 513.468379] env[63371]: DEBUG nova.scheduler.client.report [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 
'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 513.468642] env[63371]: DEBUG nova.compute.provider_tree [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 0 to 1 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 513.468786] env[63371]: DEBUG nova.compute.provider_tree [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 513.515905] env[63371]: DEBUG nova.compute.provider_tree [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 1 to 2 during operation: update_traits {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 514.020993] env[63371]: DEBUG nova.compute.resource_tracker [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 514.021378] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.867s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 514.021378] env[63371]: DEBUG nova.service [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Creating RPC server for service compute {{(pid=63371) start /opt/stack/nova/nova/service.py:186}} [ 514.035714] env[63371]: DEBUG nova.service [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] Join ServiceGroup membership for this service compute {{(pid=63371) start /opt/stack/nova/nova/service.py:203}} [ 514.035911] env[63371]: DEBUG nova.servicegroup.drivers.db [None req-fd8d98ed-4b20-426c-b221-f588b48a410a None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=63371) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 569.037565] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.038249] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.038463] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 569.038601] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 569.541517] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 569.543898] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.543898] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.543898] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.543898] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.543898] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 569.543898] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 570.045770] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Getting list of instances from cluster (obj){ [ 570.045770] env[63371]: value = "domain-c8" [ 570.045770] env[63371]: _type = "ClusterComputeResource" [ 570.045770] env[63371]: } {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 570.049249] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396fa62f-0da9-4738-830d-c38ee4938ebb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.056765] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Got total of 0 instances {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 570.057168] env[63371]: DEBUG oslo_service.periodic_task [None 
req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 570.057521] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 570.057819] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 570.561091] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.561343] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.561542] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.561701] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 570.562612] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977ea8bb-be55-47b2-bcf4-40de692aeb80 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.570468] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba63a4c-102b-40c2-adf3-790f2473170f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.583727] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b75cb6-31bf-4991-9ae3-90e8c1485897 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.589711] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17529120-0de0-4ed7-b0dd-b0a6b53cc994 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.617918] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181393MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 570.618063] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.618236] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 571.635870] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 571.636104] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 571.649720] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a849c4-0491-4cf6-81fd-e3bc2db328e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.657192] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-479da5b2-ac36-4322-9609-af3f717a58e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.685485] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1475a21e-4319-40ea-a352-07b9c9e836ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.692257] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581fc180-a485-48eb-9da9-440fc98d2393 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.704685] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 572.207699] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 572.713021] env[63371]: DEBUG nova.compute.resource_tracker [None 
req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 572.713427] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.095s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.713472] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 572.713802] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Getting list of instances from cluster (obj){ [ 572.713802] env[63371]: value = "domain-c8" [ 572.713802] env[63371]: _type = "ClusterComputeResource" [ 572.713802] env[63371]: } {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 572.714821] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fd51fb-e060-43b0-a44e-d4275e347444 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.723307] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Got total of 0 instances {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 626.109852] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 626.110289] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 626.615018] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 626.615254] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 626.615322] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 627.119720] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. 
{{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 627.120168] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.120168] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.120286] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.120440] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.120581] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.120718] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.120842] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 627.120974] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 627.623958] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.624221] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 627.624387] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 627.624537] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 627.625473] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b62e17-6dc3-478a-b856-05342af3223f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.633805] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae17e9c3-d0f8-4685-95e7-42ad761b42ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.648253] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4c2a10-030a-4353-8851-19e6d8184ff7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.655208] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3759736b-abd7-48c2-962c-32d714024afa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.684121] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181388MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 627.684341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 627.684506] 
env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 628.702333] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 628.702609] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 628.715409] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a94e0aa-af5c-45e6-9865-ece5c4345bea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.723142] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2a2da8-b29e-41c7-bec8-0458ad569b72 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.751586] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244bb3ee-7e69-4dde-911c-21a269342c69 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.758212] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b76d4d3-de86-426a-9b82-a298687fbabb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.770611] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 629.273517] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 629.274785] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 629.274949] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 689.276805] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.277216] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.277404] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 689.277593] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 689.781284] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 689.781533] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.781672] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.781818] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.781960] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.782116] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.782251] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 689.782374] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 689.782510] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 690.285610] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.285984] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 690.286047] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 690.286179] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 690.287098] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49dce01-36b6-4233-9c53-6926eb8a9dc7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.295310] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335ae530-c587-4fc7-b735-9a175f5f201e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.308865] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345d2aef-66ff-417d-8e4c-606a6f350a76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.314971] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1de973a0-bd82-43ca-8453-a62bd5bce478 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.342833] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181380MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 690.342971] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 690.343167] 
env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 691.361739] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 691.362094] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 691.374890] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c8ea87-3e6a-45d0-8c1f-c8c27354da13 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.382224] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560172fa-baf7-4ed0-b96b-3c6eba57a76e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.411969] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c04e1a7-9b95-4001-ad4d-20246384d009 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.418736] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cd4cc1a-1ef8-4222-ac30-9cd49aa373ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.431214] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 691.934407] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 691.935705] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 691.935891] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.084276] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 750.084768] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 750.591723] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 750.591723] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 750.591723] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 751.095356] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 751.095613] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.095782] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.095927] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.096087] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.096231] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.096371] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.096498] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 751.096632] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.604163] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.604430] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.604612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 751.604767] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 751.605680] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5170234d-57c6-4ef3-a51f-9753a57b75d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.614860] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825dc460-b75c-40b9-bbc2-a4297cda94bc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.628362] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51af56c-c7f9-4e2f-8356-ef716938e171 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.634444] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f969127-b21d-475d-8ec8-7b0b58f20bc5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.662114] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181385MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 751.662253] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 
None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.662427] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.680528] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 752.680528] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 752.695816] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65769b39-9be4-4282-beae-8f12dc7ba23d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.703796] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4b07061-2e51-442d-bb95-3603306ccb92 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.733628] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef11c67-693a-4033-8a93-8c74c74ee560 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.740569] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db0950e-d7a2-48d6-b4b7-39ea94f955b7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.753202] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.256887] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 753.258187] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 753.258365] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.596s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.430573] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.430985] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 802.934558] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] There are 0 instances to clean {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 802.934854] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.934948] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances with incomplete migration {{(pid=63371) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 803.437298] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.940628] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 806.941211] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 806.941211] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 807.444358] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. 
{{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 807.444612] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.444778] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.444941] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.445102] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.445245] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.445369] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 807.445505] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 807.949162] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 807.949609] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 807.949609] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.949821] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 807.950695] env[63371]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924d7f0b-15a7-4ae6-9ff9-8b0c77fffad5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.958929] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd26996-2932-43f4-99bf-27c56a2149c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.972693] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25af5030-3ff3-4d4b-ba95-e20fe49e9da6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.978827] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fe5da37-4b2c-4e82-a09b-9f6ee675102c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.007100] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181388MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 808.007278] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.007425] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.025088] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 809.025349] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 809.037680] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6933970-6211-4a94-be3d-ddac8e0660c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.045234] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c13972a-6a7d-4edd-bd3e-e54e0d2dbbc2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.074234] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d121c365-dec6-4136-bc19-ad214e145a21 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.081099] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8abbf9c2-b397-4cec-8ed8-689b237e874e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.093568] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 809.596116] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 809.597367] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 809.597544] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 810.583578] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 810.583875] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 867.430476] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 867.430838] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.430983] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.431363] env[63371]: DEBUG nova.compute.manager [None 
req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 868.431363] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 868.934210] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 868.934437] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.934593] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.934744] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 868.934872] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 868.935034] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.438622] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.439020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.439067] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.439196] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 869.440096] env[63371]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9ec18a1-8072-4f1b-95f2-aebc689d0333 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.448212] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922c40a3-ac07-41fe-a004-74fa51248e2d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.462612] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c62328ba-55c6-4db2-a931-ac644fec731b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.468732] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a50d9c-65ce-4837-b9b2-a8733b2681dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.496333] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181394MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 869.496482] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.496673] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.528296] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 870.528545] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 870.544214] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 870.558041] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 870.558220] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 870.571875] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 870.587247] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 870.597771] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c384e6-b475-46d1-8959-728ccea5f628 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.605068] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5743c0d-c1b8-45d7-aa8f-152a68e30bb1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.634158] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f39005-911c-4254-918f-075380a21396 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.640920] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03703ed3-a233-4294-a851-38ed3e5eeaa1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.653311] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.156092] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 871.157406] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 871.157590] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.661s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.654322] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 871.654671] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 872.425479] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.430858] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.431290] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.431290] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.431449] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.431528] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 928.431660] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 928.935013] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.935269] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.935433] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.935588] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 928.936518] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3baeb01b-9590-4da1-a015-12b03c24ba53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.944707] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5bb86aa-acb4-4a5b-baba-344782b58932 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.958385] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f6cf7d-1bab-4541-8b0f-990f1023cc86 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.964320] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1836a258-7a3e-4b69-8b11-32c63e87c7a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.993015] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181385MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 928.993153] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 928.993324] 
env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.010688] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 930.010924] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 930.023517] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19037273-33c8-4239-b80f-da0ffa568d50 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.030787] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06db7879-8916-4c52-9372-5e531a6cf418 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.059670] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da58ecc-9833-45c5-9c7b-c60e8b92e842 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.066510] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1c9cac-3cae-48be-bfd7-41efbfacb54b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.081121] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 930.583913] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 930.585196] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 930.585374] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.579486] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.579847] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.579847] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 932.579966] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 933.082686] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 933.082919] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 933.083113] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.430681] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.431148] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.431148] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 990.431283] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 990.934459] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. 
{{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 990.934706] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.934889] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.935062] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.935208] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.935351] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 990.935478] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 990.935615] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 991.438811] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.439178] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.439228] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.439364] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 991.440265] env[63371]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b56b9245-f453-4413-8feb-ba4fcb6c30f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.450137] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294af45a-1ece-4c2a-91c7-096055997c1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.464380] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b88b03-6fd3-487a-bc1f-72fb0ec65510 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.470789] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a725c1fe-57df-40dd-89a9-56fedd91356f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.499356] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181395MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 991.499494] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.499674] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 992.518038] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 992.518289] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 992.531757] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06b1b56-e349-4605-8596-49d8a3143fe0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.539342] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61bc2ab4-9743-4304-b0d0-f32e2ba26fc5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.568424] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b67d8580-5ae0-4d1e-9268-4f91b3a180cb {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.575712] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a345437-4330-4589-aff9-2e78cf9579b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.588237] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.091708] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 993.093079] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 993.093260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 993.588927] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 994.094263] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.431519] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1050.934978] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.935240] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1050.935403] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1050.935557] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1050.936505] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ba99bb-570b-44e6-8495-beceb02ac914 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.945141] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0195fdc3-3e40-4abb-81ba-afe72624739b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.959012] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb5f335-3fdb-4cce-8c5a-a2670c1740e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.965020] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c1afb7-14bf-4ba0-956f-ee57fedb31fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.993527] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181381MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1050.993670] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.993897] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1052.013019] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1052.013019] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1052.023833] env[63371]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13202d6-245f-4fcd-9322-444cb36cc418 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.031679] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51359e06-6364-4f4d-8406-8d201ea7d147 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.060405] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de3bcaba-b682-4f3a-a7c1-d2beb35c7de3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.066964] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06664c6-9042-4722-ae3d-74619930864c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.079347] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1052.583098] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1052.584356] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1052.584538] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1053.584899] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.585172] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.585326] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1053.585443] env[63371]: DEBUG nova.compute.manager [None 
req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1054.088465] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1054.088700] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.088834] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.088989] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.089178] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.089312] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.089460] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.089585] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1104.432585] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.432585] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1104.936645] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] There are 0 instances to clean {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1104.936645] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1104.936645] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances with incomplete migration {{(pid=63371) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1112.438268] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.438647] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.438647] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.438800] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.438956] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1112.439163] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1112.945569] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.945810] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.945973] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.946139] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1112.947110] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67b6ff4-633b-4926-a6b5-99d2f3dc4097 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.955339] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68425a3-b5d7-4300-9f49-281547992258 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.969332] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31148ec9-0578-472a-bae7-8825c850af7b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.975624] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73a389e4-5f84-45fb-b1d0-9748518d1c99 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.003668] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181384MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1113.003807] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1113.003986] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.025606] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1114.025897] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1114.039983] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aedad39-d930-4991-93a8-b8ba5f656e9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.047434] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d212bf-facf-4020-aaec-2459ed3da6c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.076145] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e9c73cd-f0db-4df4-a69a-a6874d6a9c9f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.086047] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de960e03-2b23-4020-9f55-ee0f04ac4fe0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.098674] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1114.601385] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1114.602668] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1114.602840] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.599s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1115.431306] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.936680] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.936887] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1115.936977] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1116.439623] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1116.439973] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.440120] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.440337] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.440513] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.046641] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1170.549818] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Getting list of instances from cluster (obj){ [ 1170.549818] env[63371]: value = "domain-c8" [ 1170.549818] env[63371]: _type = "ClusterComputeResource" [ 1170.549818] env[63371]: } {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1170.550955] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6e632ddb-a60b-462a-a939-e1735b3942a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.560123] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Got total of 0 instances {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1171.430611] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.431069] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1171.934135] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.934421] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.934563] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.934714] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1171.935646] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3a7835-5904-42e3-8fb4-fc5e9465e1eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.944073] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d5d0e3-a4aa-4136-af1a-0bda24fe2e76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.957930] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30e0b97-14ee-40d5-a7d4-f12c52802f77 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.964236] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871d8078-0984-46bb-8039-e7fc0f1f5793 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.992515] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 
None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181375MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1171.992641] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.992832] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1173.107421] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1173.107675] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1173.122608] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1173.133620] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1173.133831] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1173.143030] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1173.156542] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1173.167238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57debed-f04d-4def-9536-10e657cd14a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.174431] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a8aaa9-a01c-4fb0-92aa-c573dcdf9a61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.202766] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e85857-8636-4e4e-95b8-626d021ab6fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.209352] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a8d93f-8f06-4507-8f0f-90c999d25dd1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.221763] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1173.725056] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1173.726241] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1173.726483] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.734s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1174.721505] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.721894] env[63371]: DEBUG 
oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.721987] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.722139] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.722278] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.722418] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.722605] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1175.431826] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.431990] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1175.432108] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1175.935532] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. 
{{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1231.431670] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1231.934925] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.935192] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.935339] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.935487] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1231.936388] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d890075-a8b8-4c0c-a5e4-5c685cd76900 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.944626] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b951bf-3405-4094-95ce-04c630323372 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.958680] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e441ed8-b2d9-4678-a2d0-e025a2cc924a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.964771] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6046214b-b175-408f-a9f1-a6a87258f4f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.992274] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181387MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1231.992428] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1231.992594] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1233.011893] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1233.012227] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1233.025720] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e127875-3761-4ed1-8ce4-7e26583114df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.033272] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c0f8f0-b2d8-4425-a624-e92f63ec5a35 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.062198] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7749c540-0c64-4a76-90b4-c1a480ac3ee7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.069466] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd68e68a-dce8-4591-bc36-8ea7aa02799d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.082190] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1233.585152] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1233.586525] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1233.586706] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1234.580298] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.580582] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.085530] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.085728] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.085844] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.085987] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.086151] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.086304] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1235.086437] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1237.432361] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1237.432824] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1237.432824] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1237.935844] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1292.431159] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1292.934566] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.935401] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.935705] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.935914] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1292.937512] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9adac518-ecc5-4651-9b34-f9918cddecd9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.954018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99effb12-ffed-4082-80c4-0e48c97fe12f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.968854] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950ef04d-5bdd-4786-9d66-00377974031d {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.976164] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d81835b-34ab-456d-a070-f42c4da89e91 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.009731] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181390MB free_disk=166GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1293.010095] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1293.010479] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1294.672185] env[63371]: INFO nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance cffe6a79-ad7e-4488-b179-608a03c978aa has allocations against this compute host but is not found in the database. [ 1294.672185] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1294.672185] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1294.672185] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687fc318-142e-4bd0-ae65-46b1f2e8a3ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.672185] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d61006-4f1f-43bc-af19-b33b09eaba22 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.677730] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8996c4b-df67-450a-8215-192dd77748c1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.684656] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c022c4-58c0-4a2e-81e2-2b319b40384a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.699136] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not 
changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1294.819295] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "cffe6a79-ad7e-4488-b179-608a03c978aa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1294.819948] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.203417] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1295.206021] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1295.206021] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.195s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1295.323950] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1295.870612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.870612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.871980] env[63371]: INFO nova.compute.claims [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1296.443863] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "a43fed87-5205-4148-834e-66778a90b7bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1296.444462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "a43fed87-5205-4148-834e-66778a90b7bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1296.952206] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1296.971822] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e99733-1acf-4a72-9fca-7091b3abd89b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.981223] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0efe60e-4f67-4594-8832-faf9b32cb16d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.016677] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd0ae29-60bc-4474-8843-03f9004a628f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.026055] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8106bd03-0f21-4383-87c7-458470b88675 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.049091] env[63371]: DEBUG nova.compute.provider_tree [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1297.206698] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.206698] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.206698] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.206698] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.206859] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.207135] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.207246] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.207405] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1297.225527] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.225733] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1297.436860] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.436860] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1297.436860] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1297.484650] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.553245] env[63371]: DEBUG nova.scheduler.client.report [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1297.729350] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Starting instance... 
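The burst of "Running periodic task ComputeManager._*" records above comes from oslo.service's periodic-task machinery, and the "CONF.reclaim_instance_interval <= 0, skipping..." line shows one task short-circuiting on configuration. A minimal sketch of how such tasks are declared is below; the manager class, the 60-second spacing and the guard variable are illustrative assumptions, not Nova's real values.

# Hedged sketch of the oslo.service periodic-task pattern behind the
# "Running periodic task" records above. MyManager is an example class only.
from oslo_config import cfg
from oslo_service import periodic_task

class MyManager(periodic_task.PeriodicTasks):
    def __init__(self):
        super().__init__(cfg.CONF)

    @periodic_task.periodic_task(spacing=60)
    def _reclaim_queued_deletes(self, context):
        # Mirrors the guard visible in the log: a non-positive interval makes
        # the real task log "skipping" and return without doing any work.
        reclaim_interval = 0  # stand-in for CONF.reclaim_instance_interval
        if reclaim_interval <= 0:
            return
        # ... reclaim work would go here ...

mgr = MyManager()
mgr.run_periodic_tasks(context=None)  # a timer drives this call in the real service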
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1297.938724] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Skipping network cache update for instance because it is Building. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1297.938724] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1297.965880] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "ca202079-2eae-441e-80f6-e403497e137d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1297.965880] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "ca202079-2eae-441e-80f6-e403497e137d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.062319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.191s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1298.062319] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Start building networks asynchronously for instance. 
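The Acquiring / acquired / released triplets around "compute_resources" and the per-instance UUID locks in the records above, with their waited/held timings, are emitted by oslo.concurrency's lockutils. The sketch below shows the two usual forms, the decorator and the context manager; the function names are examples, not Nova's real call sites.

# Illustrative use of oslo.concurrency locking, the source of the
# "Acquiring lock ... / acquired ... waited / released ... held" records.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Everything here runs under the named lock; the DEBUG lines record how
    # long callers waited for it and how long they held it.
    return f"claimed for {instance_uuid}"

def build_instance(instance_uuid):
    # Equivalent context-manager form, one lock per instance UUID.
    with lockutils.lock(instance_uuid):
        return claim_resources(instance_uuid)

print(build_instance('cffe6a79-ad7e-4488-b179-608a03c978aa'))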
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1298.067229] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.583s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.069799] env[63371]: INFO nova.compute.claims [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1298.267298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.330114] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "fc0715a1-a056-4a1b-a86e-959680effc97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.330114] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "fc0715a1-a056-4a1b-a86e-959680effc97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.380561] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "d9523239-79d1-434f-977a-e1f0e358c82b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1298.381134] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "d9523239-79d1-434f-977a-e1f0e358c82b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1298.472936] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1298.580043] env[63371]: DEBUG nova.compute.utils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1298.580043] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1298.580043] env[63371]: DEBUG nova.network.neutron [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1298.833437] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1298.884428] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1299.002754] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.089224] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1299.212953] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79871264-7bf8-41d3-bd51-ace578111b87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.224501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825eaf91-3579-47c6-bb35-3b92379e6896 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.259287] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854ff6a8-5432-49b3-ad93-9c9061f8454e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.276549] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41979288-9832-4b26-b927-5c87a75d0f40 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.289533] env[63371]: DEBUG nova.compute.provider_tree [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1299.364099] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.428721] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1299.731054] env[63371]: DEBUG nova.policy [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b3af3bbd35846198784331994497179', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '857815a7f15648948bb4ca862473ed06', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1299.791651] env[63371]: DEBUG nova.scheduler.client.report [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1300.110964] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1300.154862] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1300.154990] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1300.155162] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1300.156569] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1300.156569] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1300.156569] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1300.159134] env[63371]: DEBUG nova.virt.hardware 
[None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1300.159134] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1300.159134] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1300.159134] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1300.159409] env[63371]: DEBUG nova.virt.hardware [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1300.162241] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b724217c-cb28-4a26-a32e-77439c1aff3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.173022] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01391cf-c75b-43da-a9f3-f1f55ba21121 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.193806] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e807a9-d5b0-487b-a48f-f8942fec8c32 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.297684] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.231s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1300.298306] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Start building networks asynchronously for instance. 
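The nova.virt.hardware records above walk through topology selection: with flavor and image limits of 0:0:0 and a ceiling of 65536 sockets/cores/threads, a 1-vCPU m1.nano request yields exactly one possible topology, 1 socket x 1 core x 1 thread. The sketch below reproduces that enumeration in a simplified form; it is only the factorisation idea behind "Got 1 possible topologies", not Nova's actual algorithm (which also handles preferences and NUMA).

# Simplified illustration of the topology enumeration logged above: find all
# (sockets, cores, threads) whose product equals the vCPU count, subject to
# per-dimension maximums.
def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topos.append((sockets, cores, threads))
    return topos

print(possible_topologies(1))   # [(1, 1, 1)] -> matches "Got 1 possible topologies"
print(possible_topologies(4))   # (1, 1, 4), (1, 2, 2), (1, 4, 1), (2, 1, 2), ...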
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1300.301015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.034s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1300.302621] env[63371]: INFO nova.compute.claims [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1300.807386] env[63371]: DEBUG nova.compute.utils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1300.809589] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1300.809589] env[63371]: DEBUG nova.network.neutron [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1300.859352] env[63371]: DEBUG nova.policy [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62266d81b3724a98b80b05cbb08227fa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a7e27f48936d4019bd23bc30cd94f85b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1301.025407] env[63371]: DEBUG nova.network.neutron [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Successfully created port: d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1301.180482] env[63371]: DEBUG nova.network.neutron [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Successfully created port: 85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1301.287356] 
env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1301.287595] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1301.316091] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1301.459341] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82dea60c-a0ac-4e8f-bd08-96a891315ae4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.468314] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b791cc5f-1792-4fb0-b9bc-61cd925336b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.504249] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9478e567-0267-4555-bf78-b5ad61a19e29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.513202] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90322a27-050a-48bf-9883-e4216641f101 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.528812] env[63371]: DEBUG nova.compute.provider_tree [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.789594] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1302.033905] env[63371]: DEBUG nova.scheduler.client.report [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1302.141408] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "47c1c242-d190-4523-8033-307c5a9b7535" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.141645] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "47c1c242-d190-4523-8033-307c5a9b7535" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.332175] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1302.335856] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.369100] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1302.369374] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1302.369526] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1302.369782] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1302.370765] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1302.370765] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1302.370956] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 
tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1302.371191] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1302.371339] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1302.371519] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1302.371689] env[63371]: DEBUG nova.virt.hardware [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1302.372717] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d3aaa6d-c3e0-4637-8b9f-30651f4084a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.382548] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad15e87-5fb1-4805-ba7d-1a161db3a877 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.544526] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.242s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1302.546162] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1302.550852] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.548s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1302.552436] env[63371]: INFO nova.compute.claims [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1302.644628] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1302.695130] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1302.695255] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1303.065420] env[63371]: DEBUG nova.compute.utils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1303.068368] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1303.068368] env[63371]: DEBUG nova.network.neutron [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1303.184424] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.199010] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1303.213759] env[63371]: DEBUG nova.policy [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd0d15de96e1a4ed994bbb2226d7a3da1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a2d459d8cd874202a489beb816804cc8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1303.572191] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Start building block device mappings for instance. 
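The "Policy check for network:attach_external_network failed with credentials {...}" records above (one per instance build) are ordinary oslo.policy evaluations: a member/reader token without an admin role fails the external-network rule and the build proceeds without it. The sketch below shows such a check in isolation; the 'role:admin' default check string is an assumption for illustration and is not copied from Nova's policy files.

# Hedged sketch of an oslo.policy check like the failed
# "network:attach_external_network" authorisations logged above.
from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],
         'project_id': '857815a7f15648948bb4ca862473ed06'}

allowed = enforcer.enforce('network:attach_external_network', {}, creds)
print(allowed)  # False -> produces the DEBUG "Policy check ... failed" line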
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1303.789844] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1303.845840] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a59e33-0afa-4ba0-ac7f-37bba63c8086 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.853350] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32db03ab-3e6d-4ec2-a264-75a1dfb6a315 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.883867] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c2810e-3868-4d10-b241-6fcbdaed85e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.891473] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4deae797-8209-414d-a037-1331f44b5110 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.904490] env[63371]: DEBUG nova.compute.provider_tree [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1304.198355] env[63371]: DEBUG nova.network.neutron [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Successfully updated port: d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1304.218321] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "201a2d1e-9e2c-4c07-92be-200408874ad4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1304.219157] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.004s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.357249] env[63371]: DEBUG nova.network.neutron [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 
3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Successfully created port: 8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1304.407563] env[63371]: DEBUG nova.scheduler.client.report [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1304.586833] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1304.624561] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1304.625395] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1304.625395] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1304.625395] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1304.625395] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Image pref 0:0:0 {{(pid=63371) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1304.625600] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1304.626700] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1304.626954] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1304.627159] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1304.627333] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1304.627506] env[63371]: DEBUG nova.virt.hardware [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1304.628419] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5723447f-d07f-4f98-923a-681b0f3198a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.638485] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1c83e3-34d1-4ee0-8337-304b283e033a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.702420] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "refresh_cache-cffe6a79-ad7e-4488-b179-608a03c978aa" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1304.702530] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired lock "refresh_cache-cffe6a79-ad7e-4488-b179-608a03c978aa" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.702753] env[63371]: DEBUG 
nova.network.neutron [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1304.724633] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1304.915296] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.362s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1304.915820] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1304.922025] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.559s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1304.922025] env[63371]: INFO nova.compute.claims [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1305.249397] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.290946] env[63371]: DEBUG nova.network.neutron [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1305.310269] env[63371]: DEBUG nova.network.neutron [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Successfully updated port: 85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1305.369699] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "33cf00ea-3195-41cf-9b7a-a8e64496a122" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1305.369980] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1305.422533] env[63371]: DEBUG nova.compute.utils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1305.427109] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1305.427109] env[63371]: DEBUG nova.network.neutron [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1305.617681] env[63371]: DEBUG nova.policy [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '892edb1b01bf4c4b84dbddd3baeb7761', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdeddf941ce24613ad019cc1202a294b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1305.675078] env[63371]: DEBUG nova.network.neutron [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Updating instance_info_cache with network_info: [{"id": "d2c4ae08-b10f-4881-8089-d2c46693937b", "address": "fa:16:3e:e9:8a:c1", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.159", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2c4ae08-b1", "ovs_interfaceid": "d2c4ae08-b10f-4881-8089-d2c46693937b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.816312] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1305.816312] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquired lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.816312] env[63371]: DEBUG nova.network.neutron [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1305.928722] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1306.152371] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f4d298-e387-45e9-a65b-e5bf1b408f93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.161539] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2657a05-c618-4a81-b6f0-63c16874a4f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.195045] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Releasing lock "refresh_cache-cffe6a79-ad7e-4488-b179-608a03c978aa" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1306.196046] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Instance network_info: |[{"id": "d2c4ae08-b10f-4881-8089-d2c46693937b", "address": "fa:16:3e:e9:8a:c1", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.159", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2c4ae08-b1", "ovs_interfaceid": "d2c4ae08-b10f-4881-8089-d2c46693937b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1306.196158] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 
cffe6a79-ad7e-4488-b179-608a03c978aa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e9:8a:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2c4ae08-b10f-4881-8089-d2c46693937b', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1306.208523] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1306.209518] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f306b2e5-9eee-414f-8d4c-108723a2a5e4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.215047] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47bb2c74-bbd8-4435-9f88-3cc2b1725ccf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.233667] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276fc9d3-521b-429c-9a3e-18b45a8ed058 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.253025] env[63371]: DEBUG nova.compute.provider_tree [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1306.255962] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Created folder: OpenStack in parent group-v4. [ 1306.255962] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating folder: Project (857815a7f15648948bb4ca862473ed06). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1306.255962] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc09978b-a6b7-4828-b372-7e62abcbb9d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.269710] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Created folder: Project (857815a7f15648948bb4ca862473ed06) in parent group-v368199. [ 1306.269930] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating folder: Instances. Parent ref: group-v368200. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1306.270197] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64cf9386-d948-4233-b623-ebe46acf2abd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.280612] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Created folder: Instances in parent group-v368200. [ 1306.280880] env[63371]: DEBUG oslo.service.loopingcall [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1306.281524] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1306.281565] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-daf5ee01-89ce-4c1e-80fd-8ca2b9ea4f52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.302579] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1306.302579] env[63371]: value = "task-1773470" [ 1306.302579] env[63371]: _type = "Task" [ 1306.302579] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.314015] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773470, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.335164] env[63371]: DEBUG nova.network.neutron [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Successfully created port: eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1306.521493] env[63371]: DEBUG nova.network.neutron [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1306.760089] env[63371]: DEBUG nova.scheduler.client.report [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1306.812337] env[63371]: DEBUG nova.network.neutron [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updating instance_info_cache with network_info: [{"id": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "address": "fa:16:3e:52:9a:b1", "network": {"id": "c7291076-10b1-479b-a360-a5d60b016548", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-373506764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7e27f48936d4019bd23bc30cd94f85b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85189d02-f6", "ovs_interfaceid": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.820799] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773470, 'name': CreateVM_Task, 'duration_secs': 0.376152} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.820969] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1306.832506] env[63371]: DEBUG oslo_vmware.service [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a97d095a-951b-4197-b750-6b9a6134d5d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.841227] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1306.841463] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.843239] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1306.844056] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6896886d-842c-46a8-be2d-a48a9a962858 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.851609] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1306.851609] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52acae75-ba31-2238-c0fb-2fb0211af0c0" [ 1306.851609] env[63371]: _type = "Task" [ 1306.851609] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.860831] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52acae75-ba31-2238-c0fb-2fb0211af0c0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.950940] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1306.983116] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1306.983377] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1306.983724] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1306.983724] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1306.983898] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1306.983957] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1306.984731] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1306.984918] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1306.985119] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1306.985290] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1306.985464] env[63371]: DEBUG nova.virt.hardware [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1306.986347] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44078da-8778-43ea-bcf9-3ce7780d5c08 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.995384] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf5c3bb-f80c-426b-823d-5c08beaafc3c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.133997] env[63371]: DEBUG nova.compute.manager [req-d87fa9c6-7866-47d5-9b24-3d16bcbba5a2 req-942d4e65-7edc-48ec-99bd-a916a4f6d2a1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Received event network-vif-plugged-d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1307.133997] env[63371]: DEBUG oslo_concurrency.lockutils [req-d87fa9c6-7866-47d5-9b24-3d16bcbba5a2 req-942d4e65-7edc-48ec-99bd-a916a4f6d2a1 service nova] Acquiring lock "cffe6a79-ad7e-4488-b179-608a03c978aa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.134178] env[63371]: DEBUG oslo_concurrency.lockutils [req-d87fa9c6-7866-47d5-9b24-3d16bcbba5a2 req-942d4e65-7edc-48ec-99bd-a916a4f6d2a1 service nova] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.134247] env[63371]: DEBUG oslo_concurrency.lockutils [req-d87fa9c6-7866-47d5-9b24-3d16bcbba5a2 req-942d4e65-7edc-48ec-99bd-a916a4f6d2a1 service nova] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
:: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.134409] env[63371]: DEBUG nova.compute.manager [req-d87fa9c6-7866-47d5-9b24-3d16bcbba5a2 req-942d4e65-7edc-48ec-99bd-a916a4f6d2a1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] No waiting events found dispatching network-vif-plugged-d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1307.134568] env[63371]: WARNING nova.compute.manager [req-d87fa9c6-7866-47d5-9b24-3d16bcbba5a2 req-942d4e65-7edc-48ec-99bd-a916a4f6d2a1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Received unexpected event network-vif-plugged-d2c4ae08-b10f-4881-8089-d2c46693937b for instance with vm_state building and task_state spawning. [ 1307.264183] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.264183] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1307.267766] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.839s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.271018] env[63371]: INFO nova.compute.claims [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1307.316610] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Releasing lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1307.316718] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Instance network_info: |[{"id": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "address": "fa:16:3e:52:9a:b1", "network": {"id": "c7291076-10b1-479b-a360-a5d60b016548", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-373506764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7e27f48936d4019bd23bc30cd94f85b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85189d02-f6", "ovs_interfaceid": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1307.317222] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:9a:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd2f5e5e2-e460-49ce-aa24-232e4a8007af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85189d02-f613-4d29-a47a-b7c1ce74c9f3', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1307.325787] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Creating folder: Project (a7e27f48936d4019bd23bc30cd94f85b). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1307.326752] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8673b809-7e97-43cb-85e2-a7735d31dcf5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.337072] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Created folder: Project (a7e27f48936d4019bd23bc30cd94f85b) in parent group-v368199. [ 1307.337396] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Creating folder: Instances. Parent ref: group-v368203. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1307.337642] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b0e02fc7-5633-424e-91a1-f74813dda5be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.348441] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Created folder: Instances in parent group-v368203. 
[ 1307.348676] env[63371]: DEBUG oslo.service.loopingcall [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1307.348872] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1307.349098] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-871d21aa-2280-41a6-a030-b5b82440dac4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.382358] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1307.382782] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1307.383112] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1307.383263] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.384045] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1307.385482] env[63371]: DEBUG nova.compute.manager [req-4d9a4eee-e6e5-4be4-a288-f779cd4fd4c5 req-ae2701ee-b17b-4b91-8a7b-70068e64d745 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Received event network-vif-plugged-85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1307.386450] env[63371]: DEBUG oslo_concurrency.lockutils [req-4d9a4eee-e6e5-4be4-a288-f779cd4fd4c5 req-ae2701ee-b17b-4b91-8a7b-70068e64d745 service nova] Acquiring lock "a43fed87-5205-4148-834e-66778a90b7bc-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1307.386680] env[63371]: DEBUG oslo_concurrency.lockutils [req-4d9a4eee-e6e5-4be4-a288-f779cd4fd4c5 req-ae2701ee-b17b-4b91-8a7b-70068e64d745 service nova] Lock "a43fed87-5205-4148-834e-66778a90b7bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1307.386868] env[63371]: DEBUG oslo_concurrency.lockutils [req-4d9a4eee-e6e5-4be4-a288-f779cd4fd4c5 req-ae2701ee-b17b-4b91-8a7b-70068e64d745 service nova] Lock "a43fed87-5205-4148-834e-66778a90b7bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1307.387091] env[63371]: DEBUG nova.compute.manager [req-4d9a4eee-e6e5-4be4-a288-f779cd4fd4c5 req-ae2701ee-b17b-4b91-8a7b-70068e64d745 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] No waiting events found dispatching network-vif-plugged-85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1307.387379] env[63371]: WARNING nova.compute.manager [req-4d9a4eee-e6e5-4be4-a288-f779cd4fd4c5 req-ae2701ee-b17b-4b91-8a7b-70068e64d745 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Received unexpected event network-vif-plugged-85189d02-f613-4d29-a47a-b7c1ce74c9f3 for instance with vm_state building and task_state spawning. [ 1307.388257] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1307.388257] env[63371]: value = "task-1773473" [ 1307.388257] env[63371]: _type = "Task" [ 1307.388257] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.388585] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a372fb0b-23b7-41d8-937d-0491f2181f43 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.404069] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773473, 'name': CreateVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.413209] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1307.413209] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1307.414265] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e327341-7e15-48f4-8322-6508b444d253 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.423624] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7946d777-3f9c-456e-8329-553aa5a94c4e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.428730] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1307.428730] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523fb129-fbf5-72c6-12d6-d2a7aa52d288" [ 1307.428730] env[63371]: _type = "Task" [ 1307.428730] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.438078] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523fb129-fbf5-72c6-12d6-d2a7aa52d288, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.529981] env[63371]: DEBUG nova.network.neutron [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Successfully updated port: 8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1307.782649] env[63371]: DEBUG nova.compute.utils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1307.786647] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1307.786647] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1307.876046] env[63371]: DEBUG nova.policy [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58195dc4ac74493cbe7ed4fbe63bce54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28cc236260a947899c5e09bca25f7360', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1307.904225] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773473, 'name': CreateVM_Task, 'duration_secs': 0.335532} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.904225] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1307.904225] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1307.908021] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.908021] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1307.908021] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-511d2f5b-9517-4f75-8664-b9029aa5e768 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.910876] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1307.910876] env[63371]: value = 
"session[52854284-8312-6a88-0b15-8c5a2a120aab]52be06f6-b316-7523-b62c-f54c466677e1" [ 1307.910876] env[63371]: _type = "Task" [ 1307.910876] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.919422] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52be06f6-b316-7523-b62c-f54c466677e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.944477] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Preparing fetch location {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1307.944614] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating directory with path [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1307.944993] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-34acdc5d-6d23-48fb-9b50-8755008c07fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.973382] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Created directory with path [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1307.973657] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Fetch image to [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1307.973892] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Downloading image file data 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 to [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk on the data store datastore1 {{(pid=63371) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1307.974771] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d817d70a-7fda-48f1-810d-bf49ed5ccda6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.986843] env[63371]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046e5b88-b4b2-4517-9a50-61a79debf7a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.997679] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7006132-b080-4421-9435-bd8455aa66c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.032153] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ceb6274-5437-4287-9b32-1595f9e76dce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.035263] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "refresh_cache-3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.035395] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquired lock "refresh_cache-3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1308.035532] env[63371]: DEBUG nova.network.neutron [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1308.041358] env[63371]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c09b0297-bf4c-45f6-b0ac-9f25c4c319b6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.069509] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Downloading image file data 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 to the data store datastore1 {{(pid=63371) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1308.138808] env[63371]: DEBUG oslo_vmware.rw_handles [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63371) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1308.291634] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1308.383503] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Successfully created port: 969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1308.434931] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1308.434931] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1308.435253] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1308.556874] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-484da872-29ab-4493-b92d-878fe73bb311 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.575872] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e699a7d-f7a0-4761-87f9-e7047f7c99f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.623804] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9fc495-90d6-46af-ab05-8d1ff488b558 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.638238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a1960a-f877-40f8-8136-0402da721b18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.644617] env[63371]: DEBUG nova.network.neutron [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1308.659092] env[63371]: DEBUG nova.compute.provider_tree [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1308.918035] env[63371]: DEBUG oslo_vmware.rw_handles [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Completed reading data from the image iterator. {{(pid=63371) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1308.918328] env[63371]: DEBUG oslo_vmware.rw_handles [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1308.979964] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Downloaded image file data 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 to vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk on the data store datastore1 {{(pid=63371) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1308.982146] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Caching image {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1308.982402] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Copying Virtual Disk [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk to [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1308.982681] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70366f22-f381-40ca-83f0-410cfb795750 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.994345] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1308.994345] env[63371]: value = "task-1773474" [ 1308.994345] env[63371]: _type = "Task" [ 1308.994345] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.005417] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773474, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.047134] env[63371]: DEBUG nova.network.neutron [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Updating instance_info_cache with network_info: [{"id": "8905eb18-7130-4195-b35c-38e03dd31b91", "address": "fa:16:3e:89:e7:88", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8905eb18-71", "ovs_interfaceid": "8905eb18-7130-4195-b35c-38e03dd31b91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1309.057966] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1309.058148] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1309.144632] env[63371]: DEBUG nova.network.neutron [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Successfully updated port: eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1309.165531] env[63371]: DEBUG nova.scheduler.client.report [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff 
tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1309.308924] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1309.386521] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1309.386521] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1309.386521] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1309.386790] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1309.386790] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1309.386790] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1309.390628] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1309.390628] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1309.390628] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1309.390628] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1309.390628] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1309.391439] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-613b43b6-d79a-431b-9aeb-2200b2d6e4c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.406552] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c7223f-e756-41e8-9495-230d02885cd8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.509460] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773474, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.549295] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Releasing lock "refresh_cache-3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1309.549633] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Instance network_info: |[{"id": "8905eb18-7130-4195-b35c-38e03dd31b91", "address": "fa:16:3e:89:e7:88", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8905eb18-71", "ovs_interfaceid": "8905eb18-7130-4195-b35c-38e03dd31b91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1309.550197] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:e7:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8905eb18-7130-4195-b35c-38e03dd31b91', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1309.557718] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Creating folder: Project (a2d459d8cd874202a489beb816804cc8). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1309.558050] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f6c67633-26c7-4729-a0d1-899b17406a37 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.570363] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Created folder: Project (a2d459d8cd874202a489beb816804cc8) in parent group-v368199. 
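The CreateVM_Task, CopyVirtualDisk_Task and SearchDatastore_Task waits interleaved through this stretch of the log all follow the same oslo.vmware pattern: the driver invokes a vCenter method that returns a Task managed object, and session.wait_for_task() then polls it, which is what produces the repeated "progress is 0% / 100%" and "completed successfully" _poll_task entries. A minimal sketch of that pattern, assuming a reachable vCenter; the endpoint, credentials and datastore paths below are placeholders, not values taken from this run:

# Illustrative sketch only; host, credentials and paths are placeholder assumptions.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.invalid',               # placeholder vCenter endpoint
    'administrator@vsphere.local',      # placeholder user
    'secret',                           # placeholder password
    api_retry_count=10,
    task_poll_interval=0.5)             # cadence of the "progress is N%" DEBUG lines

content = session.vim.service_content

# Any vCenter call that returns a Task moref is awaited the same way; here a disk
# copy is used as the example, mirroring the CopyVirtualDisk_Task entries in the log.
task_ref = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', content.virtualDiskManager,
    sourceName='[datastore1] vmware_temp/example/tmp-sparse.vmdk',      # placeholder
    destName='[datastore1] devstack-image-cache_base/example.vmdk')     # placeholder

# wait_for_task() polls the task (the _poll_task DEBUG entries above come from this
# loop) and raises an oslo_vmware exception if the task ends in an error state.
task_info = session.wait_for_task(task_ref)
print(task_info.state)

The folder-creation and VM-creation tasks above go through the same loop; only the invoked method and its arguments change.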
[ 1309.570363] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Creating folder: Instances. Parent ref: group-v368206. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1309.570604] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12ddf7b6-e327-466d-9e0f-5f692a4960c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.581045] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Created folder: Instances in parent group-v368206. [ 1309.581045] env[63371]: DEBUG oslo.service.loopingcall [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1309.581222] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1309.581427] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-16e18c16-a8a0-4434-bf3b-82758ffd39e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.600644] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1309.600644] env[63371]: value = "task-1773477" [ 1309.600644] env[63371]: _type = "Task" [ 1309.600644] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.609314] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773477, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.649525] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "refresh_cache-ca202079-2eae-441e-80f6-e403497e137d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1309.650436] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquired lock "refresh_cache-ca202079-2eae-441e-80f6-e403497e137d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.650658] env[63371]: DEBUG nova.network.neutron [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1309.672283] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.404s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1309.672950] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1309.675843] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.340s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1309.677747] env[63371]: INFO nova.compute.claims [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1310.005011] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773474, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.656695} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.005502] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Copied Virtual Disk [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk to [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1310.005694] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleting the datastore file [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/tmp-sparse.vmdk {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1310.005952] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0cef5904-7663-44e4-8db4-f544154038f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.012636] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1310.012636] env[63371]: value = "task-1773478" [ 1310.012636] env[63371]: _type = "Task" [ 1310.012636] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.020898] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773478, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.057280] env[63371]: DEBUG nova.compute.manager [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Received event network-changed-d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1310.057280] env[63371]: DEBUG nova.compute.manager [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Refreshing instance network info cache due to event network-changed-d2c4ae08-b10f-4881-8089-d2c46693937b. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1310.057280] env[63371]: DEBUG oslo_concurrency.lockutils [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] Acquiring lock "refresh_cache-cffe6a79-ad7e-4488-b179-608a03c978aa" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.057280] env[63371]: DEBUG oslo_concurrency.lockutils [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] Acquired lock "refresh_cache-cffe6a79-ad7e-4488-b179-608a03c978aa" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.057280] env[63371]: DEBUG nova.network.neutron [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Refreshing network info cache for port d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1310.112951] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773477, 'name': CreateVM_Task, 'duration_secs': 0.353244} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.113176] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1310.114216] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.115315] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.115315] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1310.119145] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16c52329-078c-47eb-8d02-de135ee65c8a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.125689] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1310.125689] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a911f5-8265-1c72-5c78-09b6a38270a3" [ 1310.125689] env[63371]: _type = "Task" [ 1310.125689] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.135409] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a911f5-8265-1c72-5c78-09b6a38270a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.183186] env[63371]: DEBUG nova.compute.utils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1310.191054] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1310.191250] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1310.198345] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Received event network-changed-85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1310.198345] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Refreshing instance network info cache due to event network-changed-85189d02-f613-4d29-a47a-b7c1ce74c9f3. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1310.198345] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquiring lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.198345] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquired lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1310.198345] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Refreshing network info cache for port 85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1310.231496] env[63371]: DEBUG nova.network.neutron [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1310.303078] env[63371]: DEBUG nova.policy [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58195dc4ac74493cbe7ed4fbe63bce54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28cc236260a947899c5e09bca25f7360', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1310.525177] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773478, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024217} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.525619] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1310.525619] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Moving file from [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 to [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9. 
{{(pid=63371) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1310.526142] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-70049fee-71ed-46eb-9f83-86ccf0081b76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.533437] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1310.533437] env[63371]: value = "task-1773479" [ 1310.533437] env[63371]: _type = "Task" [ 1310.533437] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.543791] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773479, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.638024] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1310.638024] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1310.638024] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1310.691492] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1310.922209] env[63371]: DEBUG nova.network.neutron [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Updating instance_info_cache with network_info: [{"id": "eb0a9632-9bb3-4855-8ad5-af6c7a628900", "address": "fa:16:3e:4b:62:86", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb0a9632-9b", "ovs_interfaceid": "eb0a9632-9bb3-4855-8ad5-af6c7a628900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.966133] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4c7082-c416-44dc-9930-eb84d84166ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.974527] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5586f96-e1f0-4352-b151-d2077f2bcedc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.014710] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633a9118-cf08-4a61-b32f-7aebe5ebd0b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.023180] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d55bbe7-027f-436f-ab28-2a8cfd193dfb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.038223] env[63371]: DEBUG nova.compute.provider_tree [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1311.047683] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773479, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.026407} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.047923] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] File moved {{(pid=63371) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1311.048149] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Cleaning up location [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1311.048992] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleting the datastore file [datastore1] vmware_temp/5c60a6b4-4335-46d5-9262-089a495c895a {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1311.048992] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e036020e-0f80-4803-8ce6-bc322ebd2e72 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.057586] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1311.057586] env[63371]: value = "task-1773480" [ 1311.057586] env[63371]: _type = "Task" [ 1311.057586] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.072292] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773480, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.233629] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Successfully updated port: 969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1311.266243] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Successfully created port: 6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1311.412200] env[63371]: DEBUG nova.network.neutron [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Updated VIF entry in instance network info cache for port d2c4ae08-b10f-4881-8089-d2c46693937b. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1311.412560] env[63371]: DEBUG nova.network.neutron [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Updating instance_info_cache with network_info: [{"id": "d2c4ae08-b10f-4881-8089-d2c46693937b", "address": "fa:16:3e:e9:8a:c1", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.159", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2c4ae08-b1", "ovs_interfaceid": "d2c4ae08-b10f-4881-8089-d2c46693937b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.429021] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Releasing lock "refresh_cache-ca202079-2eae-441e-80f6-e403497e137d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1311.429382] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Instance network_info: |[{"id": "eb0a9632-9bb3-4855-8ad5-af6c7a628900", "address": "fa:16:3e:4b:62:86", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb0a9632-9b", "ovs_interfaceid": "eb0a9632-9bb3-4855-8ad5-af6c7a628900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1311.430509] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None 
req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:62:86', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb0a9632-9bb3-4855-8ad5-af6c7a628900', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1311.440017] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Creating folder: Project (cdeddf941ce24613ad019cc1202a294b). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1311.440017] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e10756c-9f64-4276-91f8-6ab691b5e716 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.450901] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Created folder: Project (cdeddf941ce24613ad019cc1202a294b) in parent group-v368199. [ 1311.451224] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Creating folder: Instances. Parent ref: group-v368209. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1311.451483] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42b7f1e7-d34f-4e51-bcfb-7abb37dd6da8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.463326] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Created folder: Instances in parent group-v368209. [ 1311.463804] env[63371]: DEBUG oslo.service.loopingcall [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1311.464122] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca202079-2eae-441e-80f6-e403497e137d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1311.464482] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b9611e7-2c7f-4c6c-921f-28051d4cf338 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.486846] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1311.486846] env[63371]: value = "task-1773483" [ 1311.486846] env[63371]: _type = "Task" [ 1311.486846] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.495478] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773483, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.545419] env[63371]: DEBUG nova.scheduler.client.report [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1311.573270] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773480, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024329} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.574270] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1311.575649] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3710362c-e0a8-40d9-8304-3cd08c64b34e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.582369] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1311.582369] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5209196a-47df-d5a9-cfb2-738f8ec4ea13" [ 1311.582369] env[63371]: _type = "Task" [ 1311.582369] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.596101] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5209196a-47df-d5a9-cfb2-738f8ec4ea13, 'name': SearchDatastore_Task, 'duration_secs': 0.008508} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.596101] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1311.596101] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cffe6a79-ad7e-4488-b179-608a03c978aa/cffe6a79-ad7e-4488-b179-608a03c978aa.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1311.596417] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.596682] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1311.596950] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1a9d429-c84b-42f9-9dd4-46f72456ed29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.599095] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94c6c62f-2d15-4933-8695-1d561e4a23de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.606119] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1311.606119] env[63371]: value = "task-1773484" [ 1311.606119] env[63371]: _type = "Task" [ 1311.606119] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.610451] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1311.610719] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1311.612184] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8307bbcf-20fc-4e32-a4f7-af5e44f9de3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.617814] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773484, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.620854] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1311.620854] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5243318a-51f4-f0e6-d25e-6a20352098de" [ 1311.620854] env[63371]: _type = "Task" [ 1311.620854] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.628741] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5243318a-51f4-f0e6-d25e-6a20352098de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.696610] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updated VIF entry in instance network info cache for port 85189d02-f613-4d29-a47a-b7c1ce74c9f3. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1311.697077] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updating instance_info_cache with network_info: [{"id": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "address": "fa:16:3e:52:9a:b1", "network": {"id": "c7291076-10b1-479b-a360-a5d60b016548", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-373506764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7e27f48936d4019bd23bc30cd94f85b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85189d02-f6", "ovs_interfaceid": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1311.705711] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1311.741213] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1311.741505] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1311.742550] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1311.742776] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1311.742943] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1311.743110] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1311.743343] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1311.743482] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1311.743684] env[63371]: DEBUG nova.virt.hardware [None 
req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1311.743848] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1311.744054] env[63371]: DEBUG nova.virt.hardware [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1311.744657] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "refresh_cache-fc0715a1-a056-4a1b-a86e-959680effc97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1311.744788] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "refresh_cache-fc0715a1-a056-4a1b-a86e-959680effc97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1311.744932] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1311.747308] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6886e1b5-967b-45e2-826a-81777497c2fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.756919] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1311.756919] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1311.762966] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e3c320-111d-4381-b4ce-2df5034505a7 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.917027] env[63371]: DEBUG oslo_concurrency.lockutils [req-ba328348-4a92-4e1f-85f8-620d27e18559 req-b6792d2e-8db6-4aa1-b232-ba2af300d2b1 service nova] Releasing lock "refresh_cache-cffe6a79-ad7e-4488-b179-608a03c978aa" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1311.999137] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773483, 'name': CreateVM_Task, 'duration_secs': 0.377513} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.000540] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca202079-2eae-441e-80f6-e403497e137d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1312.000973] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1312.001169] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.004684] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1312.005759] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-999bdef6-4eb8-4ed3-93be-9d5b6b77cbf6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.010905] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1312.010905] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fea4f1-8258-e3d4-92f3-3802e0e3850b" [ 1312.010905] env[63371]: _type = "Task" [ 1312.010905] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.020826] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fea4f1-8258-e3d4-92f3-3802e0e3850b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.053965] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.378s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.057556] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1312.058958] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.875s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.060700] env[63371]: INFO nova.compute.claims [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1312.118205] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773484, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496473} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.119072] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cffe6a79-ad7e-4488-b179-608a03c978aa/cffe6a79-ad7e-4488-b179-608a03c978aa.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1312.119072] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1312.119072] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d942e14-ca95-4078-a447-cec723ecc188 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.130210] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5243318a-51f4-f0e6-d25e-6a20352098de, 'name': SearchDatastore_Task, 'duration_secs': 0.016445} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.132269] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1312.132269] env[63371]: value = "task-1773485" [ 1312.132269] env[63371]: _type = "Task" [ 1312.132269] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.132549] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1885803f-75a0-406a-aafd-5c4d25c1b0e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.141627] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1312.141627] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52e37516-7277-358c-748f-e92b45364aba" [ 1312.141627] env[63371]: _type = "Task" [ 1312.141627] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.155799] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e37516-7277-358c-748f-e92b45364aba, 'name': SearchDatastore_Task, 'duration_secs': 0.009201} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.156354] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.156812] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] a43fed87-5205-4148-834e-66778a90b7bc/a43fed87-5205-4148-834e-66778a90b7bc.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1312.157175] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.157415] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1312.157690] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5175ed83-1d9c-4ce6-921d-285c14ad7ebd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.160646] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6e6a57a-0ddf-4808-9151-beb90f4d3823 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.166821] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1312.166821] env[63371]: value = "task-1773486" [ 1312.166821] env[63371]: _type = "Task" [ 1312.166821] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.172698] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1312.173024] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1312.174129] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5155e39-c1a9-49cc-99f7-c670da405fe7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.183227] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773486, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.187215] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1312.187215] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a23760-05fc-6987-0f07-43dffb1c2253" [ 1312.187215] env[63371]: _type = "Task" [ 1312.187215] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.194950] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a23760-05fc-6987-0f07-43dffb1c2253, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.200654] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Releasing lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.201082] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Received event network-vif-plugged-8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1312.201248] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquiring lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.201485] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.201661] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.201843] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] No waiting events found dispatching network-vif-plugged-8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1312.202058] env[63371]: WARNING nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Received unexpected event network-vif-plugged-8905eb18-7130-4195-b35c-38e03dd31b91 for instance with vm_state building and task_state spawning. [ 1312.202367] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Received event network-changed-8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1312.202585] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Refreshing instance network info cache due to event network-changed-8905eb18-7130-4195-b35c-38e03dd31b91. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1312.202792] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquiring lock "refresh_cache-3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1312.202931] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquired lock "refresh_cache-3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.203130] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Refreshing network info cache for port 8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1312.326529] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1312.523509] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fea4f1-8258-e3d4-92f3-3802e0e3850b, 'name': SearchDatastore_Task, 'duration_secs': 0.053215} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.523753] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1312.524058] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1312.524248] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1312.567758] env[63371]: DEBUG nova.compute.utils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1312.575477] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1312.575477] env[63371]: DEBUG nova.network.neutron [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1312.649543] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773485, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078019} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.649826] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1312.653204] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abfd05b-9ccd-4ec9-b434-358832ba10aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.691848] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] cffe6a79-ad7e-4488-b179-608a03c978aa/cffe6a79-ad7e-4488-b179-608a03c978aa.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1312.693581] env[63371]: DEBUG nova.policy [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '919325b57d54429a9bb73f64cd086373', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e7f96aff7d240469616d256291f7081', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1312.695923] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Updating instance_info_cache with network_info: [{"id": "969cd918-b804-4635-a828-8235c720e31b", "address": "fa:16:3e:9c:ad:dd", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap969cd918-b8", "ovs_interfaceid": "969cd918-b804-4635-a828-8235c720e31b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1312.699828] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c6ea92f-3d05-4d6d-bb3d-b2d5b6209575 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.739204] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773486, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.742870] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1312.742870] env[63371]: value = "task-1773487" [ 1312.742870] env[63371]: _type = "Task" [ 1312.742870] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.752380] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a23760-05fc-6987-0f07-43dffb1c2253, 'name': SearchDatastore_Task, 'duration_secs': 0.00871} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.753551] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f20713a8-f594-4493-b33f-e146f5e0bba1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.761710] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773487, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.765040] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1312.765040] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52db1a82-6c75-a530-4c02-dbbeaa8cf2b7" [ 1312.765040] env[63371]: _type = "Task" [ 1312.765040] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1312.775586] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52db1a82-6c75-a530-4c02-dbbeaa8cf2b7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.875823] env[63371]: DEBUG nova.compute.manager [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Received event network-vif-plugged-969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1312.876106] env[63371]: DEBUG oslo_concurrency.lockutils [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] Acquiring lock "fc0715a1-a056-4a1b-a86e-959680effc97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1312.876285] env[63371]: DEBUG oslo_concurrency.lockutils [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] Lock "fc0715a1-a056-4a1b-a86e-959680effc97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1312.876458] env[63371]: DEBUG oslo_concurrency.lockutils [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] Lock "fc0715a1-a056-4a1b-a86e-959680effc97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1312.876597] env[63371]: DEBUG nova.compute.manager [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] No waiting events found dispatching network-vif-plugged-969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1312.876863] env[63371]: WARNING nova.compute.manager [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Received unexpected event network-vif-plugged-969cd918-b804-4635-a828-8235c720e31b for instance with vm_state building and task_state spawning. [ 1312.878024] env[63371]: DEBUG nova.compute.manager [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Received event network-changed-969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1312.878311] env[63371]: DEBUG nova.compute.manager [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Refreshing instance network info cache due to event network-changed-969cd918-b804-4635-a828-8235c720e31b. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1312.878963] env[63371]: DEBUG oslo_concurrency.lockutils [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] Acquiring lock "refresh_cache-fc0715a1-a056-4a1b-a86e-959680effc97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1313.076176] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1313.186988] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773486, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.224128] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "refresh_cache-fc0715a1-a056-4a1b-a86e-959680effc97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1313.224128] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Instance network_info: |[{"id": "969cd918-b804-4635-a828-8235c720e31b", "address": "fa:16:3e:9c:ad:dd", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap969cd918-b8", "ovs_interfaceid": "969cd918-b804-4635-a828-8235c720e31b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1313.225588] env[63371]: DEBUG oslo_concurrency.lockutils [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] Acquired lock "refresh_cache-fc0715a1-a056-4a1b-a86e-959680effc97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.225588] env[63371]: DEBUG nova.network.neutron [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 
req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Refreshing network info cache for port 969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1313.225714] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:ad:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '842f738f-eaa4-4444-a9bf-90d2b533184c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '969cd918-b804-4635-a828-8235c720e31b', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1313.239310] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Creating folder: Project (28cc236260a947899c5e09bca25f7360). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1313.243066] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-301ae52a-c1cb-4fba-bff3-d3469bee6735 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.255036] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773487, 'name': ReconfigVM_Task, 'duration_secs': 0.374237} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.258915] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Reconfigured VM instance instance-00000001 to attach disk [datastore1] cffe6a79-ad7e-4488-b179-608a03c978aa/cffe6a79-ad7e-4488-b179-608a03c978aa.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1313.259678] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Created folder: Project (28cc236260a947899c5e09bca25f7360) in parent group-v368199. [ 1313.259891] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Creating folder: Instances. Parent ref: group-v368215. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1313.262280] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce3087ec-f1ee-44c9-9f15-d3608093e784 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.264045] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d48b7f50-0634-47d2-bd35-0c6fac19b09c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.273097] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1313.273097] env[63371]: value = "task-1773493" [ 1313.273097] env[63371]: _type = "Task" [ 1313.273097] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.279713] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52db1a82-6c75-a530-4c02-dbbeaa8cf2b7, 'name': SearchDatastore_Task, 'duration_secs': 0.020036} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.281369] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1313.281623] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094/3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1313.281882] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Created folder: Instances in parent group-v368215. [ 1313.282102] env[63371]: DEBUG oslo.service.loopingcall [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1313.284578] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1313.284767] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1313.284974] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e879e347-77f6-4f0f-a378-6059c88fe9e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.286811] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1313.292108] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa098e64-8b7b-4912-81f9-c49fe26d966f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.292422] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f486bc8-38c7-4570-8fc9-cf9bf6bb81a3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.309318] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773493, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.320237] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1313.320237] env[63371]: value = "task-1773495" [ 1313.320237] env[63371]: _type = "Task" [ 1313.320237] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.320237] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1313.320237] env[63371]: value = "task-1773496" [ 1313.320237] env[63371]: _type = "Task" [ 1313.320237] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.330440] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773495, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.335863] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773496, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.352165] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f24f5a0-8bca-4972-a0ed-f5d8593f16b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.359407] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91763cbf-a9d6-4623-aec9-584ac0fabdd2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.394448] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b63d2d8-a0cd-47cb-ab02-417da021e010 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.401065] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af63a8d-11ae-4b00-9ca9-90c50f909358 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.417144] env[63371]: DEBUG nova.compute.provider_tree [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1313.441032] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1313.441128] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1313.441890] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d36fc16-ec9e-4f1e-ab1a-ab6f23823af5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.447980] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1313.447980] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a5dbd9-9717-0c2d-4958-61f637cce207" [ 1313.447980] env[63371]: _type = "Task" [ 1313.447980] env[63371]: } to complete. 
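The "Waiting for the task", "progress is N%", and "completed successfully" lines that recur throughout this section are one polling loop per vSphere task. A simplified illustration of that lifecycle follows; poll_fn and its (state, progress) return shape are assumptions, not the oslo_vmware API, and the real driver drives the polling from a looping call rather than a bare while loop.

# Illustrative stand-in for the task waiter whose output appears above.
# poll_fn is a hypothetical callable returning (state, progress) for a
# task id such as "task-1773495".
import time

def wait_for_task(poll_fn, task_id, interval=0.5):
    while True:
        state, progress = poll_fn(task_id)      # e.g. ("running", 25)
        if state == "success":
            return
        if state == "error":
            raise RuntimeError(f"Task {task_id} failed")
        print(f"Task: {task_id} progress is {progress}%")
        time.sleep(interval)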
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.458073] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a5dbd9-9717-0c2d-4958-61f637cce207, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.659731] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Updated VIF entry in instance network info cache for port 8905eb18-7130-4195-b35c-38e03dd31b91. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1313.659731] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Updating instance_info_cache with network_info: [{"id": "8905eb18-7130-4195-b35c-38e03dd31b91", "address": "fa:16:3e:89:e7:88", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.223", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8905eb18-71", "ovs_interfaceid": "8905eb18-7130-4195-b35c-38e03dd31b91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1313.690190] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773486, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.784781] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773493, 'name': Rename_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.833665] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773495, 'name': CopyVirtualDisk_Task} progress is 0%. 
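The instance_info_cache update above carries one VIF entry with a regular shape; the MAC, fixed IP, and device name used later in the spawn all come from it. A small parsing example over a copy of that entry (values copied from the log; real code would go through nova's NetworkInfo model rather than raw dicts):

# Values copied from the cache update above; the traversal is plain dicts.
vif = {
    "id": "8905eb18-7130-4195-b35c-38e03dd31b91",
    "address": "fa:16:3e:89:e7:88",
    "network": {
        "subnets": [
            {"cidr": "192.168.233.0/24",
             "ips": [{"address": "192.168.233.223", "type": "fixed"}]},
        ],
    },
    "devname": "tap8905eb18-71",
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]
             if ip["type"] == "fixed"]
print(vif["address"], fixed_ips)   # fa:16:3e:89:e7:88 ['192.168.233.223']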
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.836786] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773496, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1313.858974] env[63371]: DEBUG nova.network.neutron [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Successfully created port: 9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1313.921123] env[63371]: DEBUG nova.scheduler.client.report [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1313.967131] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a5dbd9-9717-0c2d-4958-61f637cce207, 'name': SearchDatastore_Task, 'duration_secs': 0.154322} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1313.967131] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4b8890b-ebe2-4206-89a6-250b9946ff8a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1313.973460] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1313.973460] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5272510e-b75d-1470-7fbf-871eeef952a5" [ 1313.973460] env[63371]: _type = "Task" [ 1313.973460] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1313.982965] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5272510e-b75d-1470-7fbf-871eeef952a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.097103] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Start spawning the instance on the hypervisor. 
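The inventory reported above also fixes how much of the host the scheduler may hand out. Assuming the usual placement capacity formula, capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation, the figures work out as follows (a worked example, not output from the services):

# Worked example under the assumption capacity = (total - reserved) * ratio.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 166},
}
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity {capacity:g}, max per allocation {inv['max_unit']}")
# VCPU: capacity 192, MEMORY_MB: capacity 196078, DISK_GB: capacity 400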
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1314.128580] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1314.128822] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1314.129038] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1314.129384] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1314.129586] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1314.129745] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1314.130059] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1314.130361] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1314.130552] 
env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1314.130734] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1314.130946] env[63371]: DEBUG nova.virt.hardware [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1314.132240] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6add7197-36cf-4ca9-8bac-1b5b808ac8d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.145419] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659c17b4-8f18-4dd4-ac87-b00082902677 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.162116] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Releasing lock "refresh_cache-3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.162978] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Received event network-vif-plugged-eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1314.162978] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquiring lock "ca202079-2eae-441e-80f6-e403497e137d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1314.163257] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Lock "ca202079-2eae-441e-80f6-e403497e137d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1314.163746] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Lock "ca202079-2eae-441e-80f6-e403497e137d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1314.164095] env[63371]: DEBUG nova.compute.manager 
[req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] No waiting events found dispatching network-vif-plugged-eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1314.164956] env[63371]: WARNING nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Received unexpected event network-vif-plugged-eb0a9632-9bb3-4855-8ad5-af6c7a628900 for instance with vm_state building and task_state spawning. [ 1314.164956] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Received event network-changed-eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1314.164956] env[63371]: DEBUG nova.compute.manager [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Refreshing instance network info cache due to event network-changed-eb0a9632-9bb3-4855-8ad5-af6c7a628900. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1314.164956] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquiring lock "refresh_cache-ca202079-2eae-441e-80f6-e403497e137d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.165323] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Acquired lock "refresh_cache-ca202079-2eae-441e-80f6-e403497e137d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.165544] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Refreshing network info cache for port eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1314.195486] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773486, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.561322} completed successfully. 
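A few entries above, nova.virt.hardware builds the candidate CPU topologies for the 1-vCPU m1.nano flavor and arrives at the single option of 1 socket x 1 core x 1 thread. The enumeration those messages describe amounts to picking (sockets, cores, threads) whose product equals the vCPU count within the 65536 limits; an illustration of that idea, not nova's implementation:

# Illustration of the topology enumeration described by the hardware.py
# messages above; not nova's implementation.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    found = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    found.append(VirtCPUTopology(s, c, t))
    return found

print(possible_topologies(1, 65536, 65536, 65536))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]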
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.195682] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] a43fed87-5205-4148-834e-66778a90b7bc/a43fed87-5205-4148-834e-66778a90b7bc.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1314.196310] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1314.196310] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1682dd79-5a53-4da5-a717-824805eea1ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.205148] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1314.205148] env[63371]: value = "task-1773497" [ 1314.205148] env[63371]: _type = "Task" [ 1314.205148] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.219116] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773497, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.220217] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Successfully updated port: 6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1314.284362] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773493, 'name': Rename_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.311505] env[63371]: DEBUG nova.network.neutron [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Updated VIF entry in instance network info cache for port 969cd918-b804-4635-a828-8235c720e31b. 
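The "Extending root virtual disk to 1048576" steps in this section are consistent with a 1 GiB root disk expressed in KiB, i.e. the root_gb=1 of the m1.nano flavor dumped earlier; the unit is an assumption here, but the arithmetic is simply:

# Assumption: the ExtendVirtualDisk target size is given in KiB.
root_gb = 1                        # e.g. the m1.nano flavor shown earlier
size_kib = root_gb * 1024 * 1024   # GiB -> KiB
print(size_kib)                    # 1048576, the value seen in the log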
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1314.311898] env[63371]: DEBUG nova.network.neutron [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Updating instance_info_cache with network_info: [{"id": "969cd918-b804-4635-a828-8235c720e31b", "address": "fa:16:3e:9c:ad:dd", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap969cd918-b8", "ovs_interfaceid": "969cd918-b804-4635-a828-8235c720e31b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.333582] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773495, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.341291] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773496, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.428604] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.370s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1314.429200] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1314.431968] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.642s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1314.436295] env[63371]: INFO nova.compute.claims [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1314.490429] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5272510e-b75d-1470-7fbf-871eeef952a5, 'name': SearchDatastore_Task, 'duration_secs': 0.069944} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.491095] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.494542] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] ca202079-2eae-441e-80f6-e403497e137d/ca202079-2eae-441e-80f6-e403497e137d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1314.494542] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e92d115-110a-4401-8f93-c4a828607479 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.501181] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1314.501181] env[63371]: value = "task-1773498" [ 1314.501181] env[63371]: _type = "Task" [ 1314.501181] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.511957] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773498, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.716746] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773497, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.724288] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "refresh_cache-d9523239-79d1-434f-977a-e1f0e358c82b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.724288] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "refresh_cache-d9523239-79d1-434f-977a-e1f0e358c82b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.724288] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1314.787254] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773493, 'name': Rename_Task, 'duration_secs': 1.142131} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.790011] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1314.790528] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e3502c6-7c5c-4f2c-ae5f-7301b87360d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.796551] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1314.796551] env[63371]: value = "task-1773499" [ 1314.796551] env[63371]: _type = "Task" [ 1314.796551] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.806659] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773499, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.814919] env[63371]: DEBUG oslo_concurrency.lockutils [req-2a687b2a-2ed1-494f-abba-8ac7dcad5a14 req-87a4a211-0865-4236-a020-660ad379f3d3 service nova] Releasing lock "refresh_cache-fc0715a1-a056-4a1b-a86e-959680effc97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.832769] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773495, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.415706} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.836282] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094/3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1314.836282] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1314.836282] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7985609b-4781-458e-9e04-130a6c48afbf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.839882] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773496, 'name': CreateVM_Task, 'duration_secs': 1.489722} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.840368] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1314.841784] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.842030] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.842337] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1314.842588] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0018b442-8b4e-4f8c-ab33-f9d04b44e4f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.845381] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1314.845381] env[63371]: value = "task-1773500" [ 1314.845381] env[63371]: _type = "Task" [ 1314.845381] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.851492] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1314.851492] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5255aac8-6cbb-f376-aef9-39e8a919977c" [ 1314.851492] env[63371]: _type = "Task" [ 1314.851492] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.862304] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773500, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.871877] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5255aac8-6cbb-f376-aef9-39e8a919977c, 'name': SearchDatastore_Task, 'duration_secs': 0.012308} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1314.871877] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1314.871877] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1314.871877] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1314.872200] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1314.872200] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1314.872200] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e06c34a8-afee-4e77-bfa8-67068edc5bb3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.881433] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1314.882683] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1314.882683] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9294025-dc5d-4c7a-b072-5ce2e8057cb6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1314.888918] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1314.888918] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5245285a-70cc-deb8-092a-014866beb7f3" [ 1314.888918] env[63371]: _type = "Task" [ 1314.888918] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1314.899212] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5245285a-70cc-deb8-092a-014866beb7f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1314.942235] env[63371]: DEBUG nova.compute.utils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1314.949781] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1314.949781] env[63371]: DEBUG nova.network.neutron [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1315.018642] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773498, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.098772] env[63371]: DEBUG nova.policy [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c56ea345388e4739ae655edfa839c305', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c99d37d52edb40f99efb471da50f5845', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1315.137501] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Updated VIF entry in instance network info cache for port eb0a9632-9bb3-4855-8ad5-af6c7a628900. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1315.137501] env[63371]: DEBUG nova.network.neutron [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Updating instance_info_cache with network_info: [{"id": "eb0a9632-9bb3-4855-8ad5-af6c7a628900", "address": "fa:16:3e:4b:62:86", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.129", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb0a9632-9b", "ovs_interfaceid": "eb0a9632-9bb3-4855-8ad5-af6c7a628900", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.217098] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773497, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.562529} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.217382] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1315.218225] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271f3be6-5551-4c4e-acb7-17abca81522c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.244378] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] a43fed87-5205-4148-834e-66778a90b7bc/a43fed87-5205-4148-834e-66778a90b7bc.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1315.244742] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86edc88a-6525-4ed9-82ec-d188473f528d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.268993] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1315.268993] env[63371]: value = "task-1773501" [ 1315.268993] env[63371]: _type = "Task" [ 1315.268993] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.279032] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773501, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.306793] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773499, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.326399] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1315.365630] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773500, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091117} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.365630] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1315.366262] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e419c82-6260-427c-84c9-81741868e445 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.409915] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094/3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1315.412435] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6da92b2c-1281-4140-b678-f9f49f335d8c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.432374] env[63371]: DEBUG nova.compute.manager [req-d8e70d9a-c75e-4fca-a1c0-3d502e287ab4 req-8ecadf33-1246-45d7-8803-c2d12c407256 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Received event network-vif-plugged-6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1315.432662] env[63371]: DEBUG oslo_concurrency.lockutils [req-d8e70d9a-c75e-4fca-a1c0-3d502e287ab4 req-8ecadf33-1246-45d7-8803-c2d12c407256 service nova] Acquiring lock "d9523239-79d1-434f-977a-e1f0e358c82b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.432939] env[63371]: DEBUG oslo_concurrency.lockutils [req-d8e70d9a-c75e-4fca-a1c0-3d502e287ab4 req-8ecadf33-1246-45d7-8803-c2d12c407256 service nova] Lock "d9523239-79d1-434f-977a-e1f0e358c82b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.433186] env[63371]: DEBUG oslo_concurrency.lockutils [req-d8e70d9a-c75e-4fca-a1c0-3d502e287ab4 req-8ecadf33-1246-45d7-8803-c2d12c407256 service nova] Lock "d9523239-79d1-434f-977a-e1f0e358c82b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1315.433780] env[63371]: DEBUG nova.compute.manager [req-d8e70d9a-c75e-4fca-a1c0-3d502e287ab4 req-8ecadf33-1246-45d7-8803-c2d12c407256 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] No waiting events found dispatching network-vif-plugged-6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1315.433780] env[63371]: WARNING nova.compute.manager 
[req-d8e70d9a-c75e-4fca-a1c0-3d502e287ab4 req-8ecadf33-1246-45d7-8803-c2d12c407256 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Received unexpected event network-vif-plugged-6c410064-2e43-498a-bc47-de2e9ed224f0 for instance with vm_state building and task_state spawning. [ 1315.439776] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1315.439776] env[63371]: value = "task-1773502" [ 1315.439776] env[63371]: _type = "Task" [ 1315.439776] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.448187] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5245285a-70cc-deb8-092a-014866beb7f3, 'name': SearchDatastore_Task, 'duration_secs': 0.012} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.450990] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1315.453933] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c70af115-9180-412e-ab39-caaa67f7ab27 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.465773] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773502, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.475548] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1315.475548] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5219e93d-3387-657e-f89f-ed0cc527a85a" [ 1315.475548] env[63371]: _type = "Task" [ 1315.475548] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.488539] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5219e93d-3387-657e-f89f-ed0cc527a85a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.523054] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773498, 'name': CopyVirtualDisk_Task} progress is 51%. 
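The pairing above of "No waiting events found dispatching network-vif-plugged-..." with the WARNING about an unexpected event reflects a registry of expected per-instance events: an external event either pops a matching waiter or is flagged as unexpected because nothing was registered for it. A toy sketch of that pop-or-warn behaviour (the class and method names are taken from the log's code paths, but the body is illustrative, not the compute manager's actual code):

# Toy sketch of "pop a waiting event or warn"; body is illustrative only.
class InstanceEvents:
    def __init__(self):
        self._waiting = {}   # instance uuid -> set of expected event names

    def expect(self, instance_uuid, event_name):
        self._waiting.setdefault(instance_uuid, set()).add(event_name)

    def pop_instance_event(self, instance_uuid, event_name):
        expected = self._waiting.get(instance_uuid, set())
        if event_name in expected:
            expected.discard(event_name)
            return True
        print(f"WARNING: Received unexpected event {event_name} "
              f"for instance {instance_uuid}")
        return False

events = InstanceEvents()
events.pop_instance_event("d9523239-79d1-434f-977a-e1f0e358c82b",
                          "network-vif-plugged-6c410064-2e43-498a-bc47-de2e9ed224f0")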
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.640589] env[63371]: DEBUG oslo_concurrency.lockutils [req-0cb1e1f4-255d-455c-bd91-c43c5f5ac7b4 req-543b79fb-e359-4daa-ab2e-f3478476c6ab service nova] Releasing lock "refresh_cache-ca202079-2eae-441e-80f6-e403497e137d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.682097] env[63371]: DEBUG nova.network.neutron [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Successfully created port: a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1315.705912] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "cfbd0c7c-243e-497a-acb1-ab9323c23574" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1315.706430] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1315.762785] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9f0a41-f215-4208-9ee9-6f3220211dde {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.775267] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911d1336-02eb-4475-bbe6-c5cfc58b8d41 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.783463] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773501, 'name': ReconfigVM_Task, 'duration_secs': 0.508828} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.810528] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Reconfigured VM instance instance-00000002 to attach disk [datastore1] a43fed87-5205-4148-834e-66778a90b7bc/a43fed87-5205-4148-834e-66778a90b7bc.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1315.811785] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b39159c2-2ac8-45b6-bb1e-4eeb09861dd7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.816789] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01609b7-ca11-4d2e-b993-7338b9a6523f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.828172] env[63371]: DEBUG oslo_vmware.api [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773499, 'name': PowerOnVM_Task, 'duration_secs': 0.510147} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.829741] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1315.830437] env[63371]: INFO nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Took 15.72 seconds to spawn the instance on the hypervisor. [ 1315.830725] env[63371]: DEBUG nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1315.831213] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1315.831213] env[63371]: value = "task-1773503" [ 1315.831213] env[63371]: _type = "Task" [ 1315.831213] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1315.832287] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415b918e-d2ef-4b14-98d9-fb675d761842 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.837330] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40facd7d-18fd-4b1c-8058-0d9e8583b717 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1315.861624] env[63371]: DEBUG nova.compute.provider_tree [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1315.863146] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773503, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.967405] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773502, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1315.970461] env[63371]: DEBUG nova.network.neutron [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Updating instance_info_cache with network_info: [{"id": "6c410064-2e43-498a-bc47-de2e9ed224f0", "address": "fa:16:3e:47:47:ba", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c410064-2e", "ovs_interfaceid": "6c410064-2e43-498a-bc47-de2e9ed224f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1315.993518] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5219e93d-3387-657e-f89f-ed0cc527a85a, 'name': SearchDatastore_Task, 'duration_secs': 0.067625} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1315.993811] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1315.994110] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] fc0715a1-a056-4a1b-a86e-959680effc97/fc0715a1-a056-4a1b-a86e-959680effc97.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1315.994393] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3071d6d-c8f8-496d-8b44-ef41c7f89c05 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.008773] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1316.008773] env[63371]: value = "task-1773504" [ 1316.008773] env[63371]: _type = "Task" [ 1316.008773] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.035991] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773498, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.09842} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.039966] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] ca202079-2eae-441e-80f6-e403497e137d/ca202079-2eae-441e-80f6-e403497e137d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1316.040267] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1316.040545] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773504, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.040760] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1569155d-6fec-4efd-a9ab-1049caf03170 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.048283] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1316.048283] env[63371]: value = "task-1773505" [ 1316.048283] env[63371]: _type = "Task" [ 1316.048283] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.057671] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773505, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.350677] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773503, 'name': Rename_Task, 'duration_secs': 0.231559} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.350959] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1316.351225] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0759fa23-7dfd-4e8b-a33e-a91a604779b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.358717] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1316.358717] env[63371]: value = "task-1773507" [ 1316.358717] env[63371]: _type = "Task" [ 1316.358717] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.375509] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773507, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.379034] env[63371]: INFO nova.compute.manager [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Took 20.55 seconds to build instance. 
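The entries above repeat two client-side patterns: guarding shared resources with oslo.concurrency locks (the "Acquiring lock ... acquired ... released" triplets), and kicking off a vCenter task through an oslo.vmware session, then blocking in wait_for_task() while the library polls it (the "Waiting for the task ... _poll_task ... progress is N% ... completed successfully" entries). The following is a minimal illustrative sketch of that flow, assuming typical oslo.vmware and oslo.concurrency usage; the host, credentials, VM name, and the name-based VM lookup are placeholders for illustration and are not taken from this log, and this is not Nova's actual code path (Nova resolves VMs through its own vm_util helpers).

# Sketch only: illustrates the invoke-task-then-wait pattern seen in the log.
from oslo_concurrency import lockutils
from oslo_vmware import api, vim_util


def power_on_vm(host, user, password, vm_name):
    # One session per worker; the library itself serializes session creation
    # (the "oslo_vmware_api_lock" entries earlier in this log).
    session = api.VMwareAPISession(host, user, password,
                                   api_retry_count=10,
                                   task_poll_interval=0.5)

    # Illustrative lookup of the VM's managed object reference by name.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 100)
    vm_ref = next(obj.obj for obj in result.objects
                  if any(prop.val == vm_name for prop in obj.propSet))

    # Serialize against other workers touching the same VM, mirroring the
    # per-instance lock acquire/release pairs in the log.
    with lockutils.lock(vm_name):
        # Invoking PowerOnVM_Task returns immediately with a Task reference...
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # ...and wait_for_task() polls it until success or error; this polling
        # is what produces the "progress is N%" lines above.
        task_info = session.wait_for_task(task)
    return task_info

Usage would be something like power_on_vm('vc.example.org', 'user', 'secret', 'my-vm'); the point is only that the task call is asynchronous on the vCenter side and the caller's wait_for_task() loop is where the periodic progress entries in this log come from.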
[ 1316.396354] env[63371]: ERROR nova.scheduler.client.report [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [req-10f1947e-396d-43ed-9d12-a2fbd1678a9d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-10f1947e-396d-43ed-9d12-a2fbd1678a9d"}]} [ 1316.421508] env[63371]: DEBUG nova.scheduler.client.report [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1316.438687] env[63371]: DEBUG nova.scheduler.client.report [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1316.438973] env[63371]: DEBUG nova.compute.provider_tree [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 166, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1316.456723] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773502, 'name': ReconfigVM_Task, 'duration_secs': 0.544916} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.456723] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094/3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1316.457390] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e1530288-b322-4c2e-a2dd-6eb99be72fc3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.465075] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1316.465075] env[63371]: value = "task-1773508" [ 1316.465075] env[63371]: _type = "Task" [ 1316.465075] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.467198] env[63371]: DEBUG nova.scheduler.client.report [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1316.473537] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1316.476096] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "refresh_cache-d9523239-79d1-434f-977a-e1f0e358c82b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1316.476441] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Instance network_info: |[{"id": "6c410064-2e43-498a-bc47-de2e9ed224f0", "address": "fa:16:3e:47:47:ba", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c410064-2e", "ovs_interfaceid": "6c410064-2e43-498a-bc47-de2e9ed224f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1316.482601] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:47:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '842f738f-eaa4-4444-a9bf-90d2b533184c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c410064-2e43-498a-bc47-de2e9ed224f0', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1316.494422] env[63371]: DEBUG oslo.service.loopingcall [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1316.497744] env[63371]: DEBUG nova.network.neutron [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Successfully updated port: 9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1316.497744] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1316.497744] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-262aa6e6-7e1e-4e3c-9152-c309aa805323 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.520458] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773508, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.522162] env[63371]: DEBUG nova.scheduler.client.report [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1316.531556] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1316.531556] env[63371]: value = "task-1773509" [ 1316.531556] env[63371]: _type = "Task" [ 1316.531556] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1316.536023] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1316.537361] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1316.537361] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1316.537361] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1316.537361] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1316.537361] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1316.537682] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1316.537682] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1316.537891] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1316.537968] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1316.538366] env[63371]: DEBUG nova.virt.hardware [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1316.540000] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773504, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.540000] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a881a3-26a4-4e1d-96e0-46ecab7cc855 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.557192] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dc2327-1b06-4cb6-bce0-8899e093c89f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.562542] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773509, 'name': CreateVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.571647] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773505, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.786589] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69f4768-e7d7-45ae-9a92-7ebc24c220ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.800847] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d00bb075-cd39-4342-93d6-56df6336e36b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.834834] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61539bb-54df-42c7-bba0-3017e55d70a3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.846030] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8669a5b9-b1d8-4e33-aec0-de9b81df5756 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.862392] env[63371]: DEBUG nova.compute.provider_tree [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1316.877688] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773507, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1316.881475] env[63371]: DEBUG oslo_concurrency.lockutils [None req-862692ae-4512-4eb7-8439-7dabd08a4005 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.062s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1316.983994] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773508, 'name': Rename_Task, 'duration_secs': 0.221501} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1316.983994] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1316.985454] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1cbc91a-d5dc-4404-a341-94754d0f43cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1316.990905] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1316.990905] env[63371]: value = "task-1773510" [ 1316.990905] env[63371]: _type = "Task" [ 1316.990905] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.007026] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773510, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.015928] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1317.016105] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquired lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.016424] env[63371]: DEBUG nova.network.neutron [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1317.032572] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773504, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.054022] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773509, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.062374] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773505, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.634072} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.062374] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1317.062981] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c4b8b5-c4a5-46e6-b69d-4d5b81bc59d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.093904] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] ca202079-2eae-441e-80f6-e403497e137d/ca202079-2eae-441e-80f6-e403497e137d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1317.094262] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8bcbd397-c7b4-47b2-8ea2-d231b41a26ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.119877] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1317.119877] env[63371]: value = "task-1773511" [ 1317.119877] env[63371]: _type = "Task" [ 1317.119877] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.132820] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773511, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.383051] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773507, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.385913] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1317.424250] env[63371]: DEBUG nova.scheduler.client.report [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 18 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1317.424250] env[63371]: DEBUG nova.compute.provider_tree [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 18 to 19 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1317.424600] env[63371]: DEBUG nova.compute.provider_tree [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1317.508744] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773510, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.535376] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773504, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.205546} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.535854] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] fc0715a1-a056-4a1b-a86e-959680effc97/fc0715a1-a056-4a1b-a86e-959680effc97.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1317.535854] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1317.536927] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f96db72-167b-4c66-a22a-bae963c5229d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.545521] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1317.545521] env[63371]: value = "task-1773512" [ 1317.545521] env[63371]: _type = "Task" [ 1317.545521] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.555489] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773509, 'name': CreateVM_Task, 'duration_secs': 0.610202} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1317.555489] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1317.555489] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1317.555657] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1317.556251] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1317.559568] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16793177-b2db-40c3-a5ea-43c3018cca52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1317.561432] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773512, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.565455] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1317.565455] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b252b6-d399-3395-0387-6b31e85c5a9b" [ 1317.565455] env[63371]: _type = "Task" [ 1317.565455] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1317.575526] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b252b6-d399-3395-0387-6b31e85c5a9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.628639] env[63371]: DEBUG nova.network.neutron [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1317.641480] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773511, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.886648] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773507, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1317.928240] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1317.929371] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.497s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1317.929449] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1317.935035] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.685s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1317.936400] env[63371]: INFO nova.compute.claims [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1318.005755] env[63371]: DEBUG oslo_vmware.api [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773510, 'name': PowerOnVM_Task, 'duration_secs': 0.88435} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.006731] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1318.007029] env[63371]: INFO nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Took 13.42 seconds to spawn the instance on the hypervisor. [ 1318.007266] env[63371]: DEBUG nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1318.008574] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc0a7e8-cff7-4a77-848e-37150d06e344 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.035555] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1318.035555] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1318.060352] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773512, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073724} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.060352] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1318.060781] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c841bfcd-140b-47dd-88c1-8d4a24ddc4ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.088071] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] fc0715a1-a056-4a1b-a86e-959680effc97/fc0715a1-a056-4a1b-a86e-959680effc97.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1318.093209] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09f245f1-3a82-4b7a-915b-074e68f82680 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.118773] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b252b6-d399-3395-0387-6b31e85c5a9b, 'name': SearchDatastore_Task, 'duration_secs': 0.011057} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.119395] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1318.119632] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1318.119880] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1318.119996] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1318.120193] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1318.120527] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1318.120527] env[63371]: value = "task-1773513" [ 1318.120527] env[63371]: _type = "Task" [ 1318.120527] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.120735] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40f91fc1-b6c9-4a68-838d-0070cb180b91 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.143384] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773511, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.143384] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1318.143384] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1318.144023] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773513, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.144946] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-854dc114-1807-4ebb-b827-14fdc8467d0b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.155519] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1318.155519] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52eec37f-5ae0-156c-9c8f-147f5d972533" [ 1318.155519] env[63371]: _type = "Task" [ 1318.155519] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.165611] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52eec37f-5ae0-156c-9c8f-147f5d972533, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.344774] env[63371]: DEBUG nova.network.neutron [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updating instance_info_cache with network_info: [{"id": "9a4b63df-9697-47a1-81ad-c69476a80975", "address": "fa:16:3e:ca:f3:37", "network": {"id": "5b9593c6-3e8e-4b0f-ad69-daf3e2419d2c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-90261722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e7f96aff7d240469616d256291f7081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a4b63df-96", "ovs_interfaceid": "9a4b63df-9697-47a1-81ad-c69476a80975", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1318.377732] env[63371]: DEBUG oslo_vmware.api [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773507, 'name': PowerOnVM_Task, 'duration_secs': 1.868903} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.378196] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1318.378555] env[63371]: INFO nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Took 16.05 seconds to spawn the instance on the hypervisor. 
[ 1318.378796] env[63371]: DEBUG nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1318.379700] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024dfc76-e072-4ea4-aa92-84f998fff92c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.448192] env[63371]: DEBUG nova.compute.utils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1318.451464] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1318.451939] env[63371]: DEBUG nova.network.neutron [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1318.553094] env[63371]: INFO nova.compute.manager [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Took 20.32 seconds to build instance. [ 1318.637530] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773511, 'name': ReconfigVM_Task, 'duration_secs': 1.276785} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.641597] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Reconfigured VM instance instance-00000004 to attach disk [datastore1] ca202079-2eae-441e-80f6-e403497e137d/ca202079-2eae-441e-80f6-e403497e137d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1318.642177] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773513, 'name': ReconfigVM_Task, 'duration_secs': 0.346032} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.643709] env[63371]: DEBUG nova.policy [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd6aa709a53564231ac25fb3e878239ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c76a64c712ca4aa98c19600ef0469855', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1318.647724] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4b71038-136f-4f88-8d2e-e773c92d7e46 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.647724] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Reconfigured VM instance instance-00000005 to attach disk [datastore1] fc0715a1-a056-4a1b-a86e-959680effc97/fc0715a1-a056-4a1b-a86e-959680effc97.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1318.649182] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64ccedd5-35ed-4c22-8ce6-163317863c19 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.656870] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1318.656870] env[63371]: value = "task-1773515" [ 1318.656870] env[63371]: _type = "Task" [ 1318.656870] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.663753] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1318.663753] env[63371]: value = "task-1773516" [ 1318.663753] env[63371]: _type = "Task" [ 1318.663753] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.676035] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52eec37f-5ae0-156c-9c8f-147f5d972533, 'name': SearchDatastore_Task, 'duration_secs': 0.019331} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1318.677148] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b98242fc-28fc-49fb-b348-a815b9e051dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.686403] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773516, 'name': Rename_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.686655] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773515, 'name': Rename_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.690568] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1318.690568] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d1ad91-cc99-a613-6c1f-74982c743dce" [ 1318.690568] env[63371]: _type = "Task" [ 1318.690568] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1318.705192] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d1ad91-cc99-a613-6c1f-74982c743dce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1318.814310] env[63371]: DEBUG nova.network.neutron [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Successfully updated port: a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1318.848239] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Releasing lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1318.848561] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Instance network_info: |[{"id": "9a4b63df-9697-47a1-81ad-c69476a80975", "address": "fa:16:3e:ca:f3:37", "network": {"id": "5b9593c6-3e8e-4b0f-ad69-daf3e2419d2c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-90261722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e7f96aff7d240469616d256291f7081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a4b63df-96", "ovs_interfaceid": "9a4b63df-9697-47a1-81ad-c69476a80975", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1318.850122] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:f3:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11032cc2-b275-48d2-9c40-9455ea7d49e3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a4b63df-9697-47a1-81ad-c69476a80975', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1318.861414] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Creating folder: Project (6e7f96aff7d240469616d256291f7081). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1318.862201] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-711e41d2-aa41-417b-bc04-eb2b2ce0e3ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.874797] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Created folder: Project (6e7f96aff7d240469616d256291f7081) in parent group-v368199. [ 1318.876045] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Creating folder: Instances. Parent ref: group-v368219. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1318.876419] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8b129e8-0f11-4bb5-93ac-2173db6c1851 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.901034] env[63371]: INFO nova.compute.manager [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Took 21.44 seconds to build instance. [ 1318.955028] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1319.054654] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fc0044b-eac0-4ed0-b85b-e8626ac9e4aa tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.828s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.099164] env[63371]: DEBUG nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Received event network-changed-6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1319.099164] env[63371]: DEBUG nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Refreshing instance network info cache due to event network-changed-6c410064-2e43-498a-bc47-de2e9ed224f0. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1319.099164] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Acquiring lock "refresh_cache-d9523239-79d1-434f-977a-e1f0e358c82b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1319.100493] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Acquired lock "refresh_cache-d9523239-79d1-434f-977a-e1f0e358c82b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.100493] env[63371]: DEBUG nova.network.neutron [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Refreshing network info cache for port 6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1319.194118] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773516, 'name': Rename_Task, 'duration_secs': 0.173219} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.194448] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773515, 'name': Rename_Task, 'duration_secs': 0.173766} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.199378] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1319.203640] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1319.203990] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Created folder: Instances in parent group-v368219. [ 1319.204341] env[63371]: DEBUG oslo.service.loopingcall [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1319.204843] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5389137-eec5-4f25-a8dd-1736830c9a70 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.207131] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-60373b74-ae42-4e12-a764-3d7801e2b35d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.208913] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1319.209616] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b292ccc-e2c6-4b5a-be99-62cfa844b798 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.237073] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d1ad91-cc99-a613-6c1f-74982c743dce, 'name': SearchDatastore_Task, 'duration_secs': 0.014018} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1319.238565] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1319.238836] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] d9523239-79d1-434f-977a-e1f0e358c82b/d9523239-79d1-434f-977a-e1f0e358c82b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1319.239280] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-157b708d-f9fa-41f7-8c25-cca0be2baa62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.247148] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1319.247148] env[63371]: value = "task-1773519" [ 1319.247148] env[63371]: _type = "Task" [ 1319.247148] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.247819] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1319.247819] env[63371]: value = "task-1773520" [ 1319.247819] env[63371]: _type = "Task" [ 1319.247819] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.270670] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1319.270670] env[63371]: value = "task-1773521" [ 1319.270670] env[63371]: _type = "Task" [ 1319.270670] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.284698] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1319.284698] env[63371]: value = "task-1773522" [ 1319.284698] env[63371]: _type = "Task" [ 1319.284698] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1319.284698] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773520, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.284698] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773519, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.296201] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773521, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.300873] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773522, 'name': CreateVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.317222] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269326ec-3a98-4c79-ac07-310ec839ac40 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.324385] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1319.324385] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquired lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1319.324385] env[63371]: DEBUG nova.network.neutron [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1319.326898] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d37f49c-9cc8-4895-bfdd-5760a2479d9c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.362768] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f72d96-5025-4dbf-8b0b-990776f07981 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.373028] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162f00fc-251b-4ca7-b91c-446408e43211 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1319.390268] env[63371]: DEBUG nova.compute.provider_tree [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1319.402572] env[63371]: DEBUG oslo_concurrency.lockutils [None req-94086d5f-8be1-4564-a689-c4eb3f727fd5 tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "a43fed87-5205-4148-834e-66778a90b7bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.958s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1319.561594] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1319.623766] env[63371]: DEBUG nova.network.neutron [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Successfully created port: 3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1319.766527] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773519, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.771342] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773520, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.783422] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773521, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.797746] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773522, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1319.894655] env[63371]: DEBUG nova.scheduler.client.report [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1319.905922] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1319.963506] env[63371]: DEBUG nova.network.neutron [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1319.972265] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1320.002179] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1320.002780] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1320.002780] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1320.002979] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1320.003640] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1320.004407] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1320.004407] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1320.004407] env[63371]: DEBUG 
nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1320.004612] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1320.004674] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1320.004856] env[63371]: DEBUG nova.virt.hardware [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1320.005928] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2057de31-113d-4407-b6ec-3d7dc223b0cf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.021858] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-736baa00-64da-449a-94bb-4f32f0741f9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.100033] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.281662] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773519, 'name': PowerOnVM_Task, 'duration_secs': 0.704688} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.281662] env[63371]: DEBUG oslo_vmware.api [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773520, 'name': PowerOnVM_Task, 'duration_secs': 0.701654} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.281662] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1320.281662] env[63371]: INFO nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Took 10.97 seconds to spawn the instance on the hypervisor. [ 1320.281662] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1320.281921] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1320.281921] env[63371]: INFO nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Took 13.33 seconds to spawn the instance on the hypervisor. [ 1320.281921] env[63371]: DEBUG nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1320.282951] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a488976b-13df-4cda-95af-d82abfc906c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.289163] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f597473-ec22-4760-8fae-e0fd7c1e61b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.303570] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773521, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.662567} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.312031] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] d9523239-79d1-434f-977a-e1f0e358c82b/d9523239-79d1-434f-977a-e1f0e358c82b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1320.312031] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1320.317314] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a00b80a-ba43-4d25-8fba-a40ed58729af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.321020] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773522, 'name': CreateVM_Task, 'duration_secs': 0.717645} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.321020] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1320.321020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1320.321020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.321302] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1320.321544] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6147d35d-50c8-49f2-a7bd-7034ea2e3003 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.327238] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1320.327238] env[63371]: value = "task-1773524" [ 
1320.327238] env[63371]: _type = "Task" [ 1320.327238] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.329579] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1320.329579] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c28846-3e70-cc8c-322e-986b783b46e8" [ 1320.329579] env[63371]: _type = "Task" [ 1320.329579] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.337065] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773524, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.345128] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c28846-3e70-cc8c-322e-986b783b46e8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.404018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.466s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.404018] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1320.406406] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.478s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.408626] env[63371]: INFO nova.compute.claims [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1320.433462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.573012] env[63371]: DEBUG nova.network.neutron [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Updated VIF entry in instance network info cache for port 6c410064-2e43-498a-bc47-de2e9ed224f0. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1320.573286] env[63371]: DEBUG nova.network.neutron [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Updating instance_info_cache with network_info: [{"id": "6c410064-2e43-498a-bc47-de2e9ed224f0", "address": "fa:16:3e:47:47:ba", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c410064-2e", "ovs_interfaceid": "6c410064-2e43-498a-bc47-de2e9ed224f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1320.647529] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Acquiring lock "cffe6a79-ad7e-4488-b179-608a03c978aa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.647781] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.648013] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Acquiring lock "cffe6a79-ad7e-4488-b179-608a03c978aa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1320.648456] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1320.648456] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1320.655703] env[63371]: DEBUG nova.network.neutron [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updating instance_info_cache with network_info: [{"id": "a2807b8c-5895-474a-9c75-58bd21982409", "address": "fa:16:3e:c2:68:79", "network": {"id": "6b2f7559-22c6-4657-b126-18f7ace337d5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1011247410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c99d37d52edb40f99efb471da50f5845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2807b8c-58", "ovs_interfaceid": "a2807b8c-5895-474a-9c75-58bd21982409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1320.657519] env[63371]: INFO nova.compute.manager [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Terminating instance [ 1320.662336] env[63371]: DEBUG nova.compute.manager [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1320.662336] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1320.662714] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d34f1be-4205-4836-8d3c-cd260c4ce37a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.677483] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1320.679279] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58714c54-4fab-44cc-9fb5-74a6a3d45963 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.687408] env[63371]: DEBUG oslo_vmware.api [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Waiting for the task: (returnval){ [ 1320.687408] env[63371]: value = "task-1773525" [ 1320.687408] env[63371]: _type = "Task" [ 1320.687408] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.699228] env[63371]: DEBUG oslo_vmware.api [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Task: {'id': task-1773525, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.838333] env[63371]: INFO nova.compute.manager [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Took 21.86 seconds to build instance. [ 1320.857555] env[63371]: INFO nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Took 21.52 seconds to build instance. 
[ 1320.871060] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c28846-3e70-cc8c-322e-986b783b46e8, 'name': SearchDatastore_Task, 'duration_secs': 0.015082} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.871060] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773524, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116525} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1320.872261] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1320.872531] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1320.872773] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1320.873126] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1320.873126] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1320.873446] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1320.874073] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a47e442-3408-4a2c-8c40-de5f1f5f35f6 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.878768] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca90e3d3-c4fa-4a4e-a7b6-9bb4fcfe4e75 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.918613] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] d9523239-79d1-434f-977a-e1f0e358c82b/d9523239-79d1-434f-977a-e1f0e358c82b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1320.920364] env[63371]: DEBUG nova.compute.utils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1320.927563] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fec48d7-afa1-4b4e-88e8-531f9f74d187 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.946577] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1320.947046] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1320.954596] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1320.957930] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1320.958314] env[63371]: DEBUG nova.network.neutron [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1320.962102] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e04d798b-2c45-4a47-890c-d3dff6829115 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1320.971065] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1320.971065] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b5129a-7d8b-46e5-30e8-b73a4287f036" [ 1320.971065] env[63371]: _type = "Task" [ 1320.971065] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.971065] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1320.971065] env[63371]: value = "task-1773526" [ 1320.971065] env[63371]: _type = "Task" [ 1320.971065] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1320.983747] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b5129a-7d8b-46e5-30e8-b73a4287f036, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1320.988840] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773526, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.060630] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "362d8303-524a-457a-b8d9-2bad87fa816b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.060862] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "362d8303-524a-457a-b8d9-2bad87fa816b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.080831] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Releasing lock "refresh_cache-d9523239-79d1-434f-977a-e1f0e358c82b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1321.080831] env[63371]: DEBUG nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Received event network-vif-plugged-9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1321.080831] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Acquiring lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.080831] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.080831] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.081503] env[63371]: DEBUG nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] No waiting events found dispatching network-vif-plugged-9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1321.081503] env[63371]: WARNING nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Received unexpected event 
network-vif-plugged-9a4b63df-9697-47a1-81ad-c69476a80975 for instance with vm_state building and task_state spawning. [ 1321.081503] env[63371]: DEBUG nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Received event network-changed-9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1321.081503] env[63371]: DEBUG nova.compute.manager [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Refreshing instance network info cache due to event network-changed-9a4b63df-9697-47a1-81ad-c69476a80975. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1321.081636] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Acquiring lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1321.081670] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Acquired lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.082743] env[63371]: DEBUG nova.network.neutron [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Refreshing network info cache for port 9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1321.112035] env[63371]: DEBUG nova.compute.manager [None req-8a79505d-7d85-432e-bd2a-761f17d60453 tempest-ServerDiagnosticsTest-1087031006 tempest-ServerDiagnosticsTest-1087031006-project-admin] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1321.112035] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd46880a-84b6-4020-a059-7cacd06dbf28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.117703] env[63371]: INFO nova.compute.manager [None req-8a79505d-7d85-432e-bd2a-761f17d60453 tempest-ServerDiagnosticsTest-1087031006 tempest-ServerDiagnosticsTest-1087031006-project-admin] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Retrieving diagnostics [ 1321.119373] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f94c5f0-ab3c-422e-ab4d-026e1e9ba5b8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.169736] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Releasing lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1321.169949] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb 
tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Instance network_info: |[{"id": "a2807b8c-5895-474a-9c75-58bd21982409", "address": "fa:16:3e:c2:68:79", "network": {"id": "6b2f7559-22c6-4657-b126-18f7ace337d5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1011247410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c99d37d52edb40f99efb471da50f5845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2807b8c-58", "ovs_interfaceid": "a2807b8c-5895-474a-9c75-58bd21982409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1321.170435] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:68:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '447ff42d-b33e-4b5d-8b7f-e8117ebbbc92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2807b8c-5895-474a-9c75-58bd21982409', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1321.178237] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Creating folder: Project (c99d37d52edb40f99efb471da50f5845). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1321.179917] env[63371]: DEBUG nova.policy [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b5bee716ea542f9a463941fa477a897', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9d19f4772ff46d3b3024851822cf833', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1321.184573] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-caf9924d-3c96-460b-9961-c07247672990 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.196182] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Created folder: Project (c99d37d52edb40f99efb471da50f5845) in parent group-v368199. [ 1321.196233] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Creating folder: Instances. Parent ref: group-v368222. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1321.196933] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bc726227-2570-40aa-9505-f6a0f639681b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.205144] env[63371]: DEBUG oslo_vmware.api [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Task: {'id': task-1773525, 'name': PowerOffVM_Task, 'duration_secs': 0.299199} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.206078] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1321.206161] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1321.206413] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e62cb396-1718-4cce-8b53-5ec803fcd5b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.215913] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Created folder: Instances in parent group-v368222. [ 1321.215913] env[63371]: DEBUG oslo.service.loopingcall [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1321.215913] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1321.216085] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0216c982-e174-4f87-9676-850002796b99 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.238993] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1321.238993] env[63371]: value = "task-1773530" [ 1321.238993] env[63371]: _type = "Task" [ 1321.238993] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.247582] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773530, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.286421] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1321.286421] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1321.286421] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Deleting the datastore file [datastore1] cffe6a79-ad7e-4488-b179-608a03c978aa {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1321.286800] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cee4e9e0-aceb-4e19-b3ac-d30d1b97fa62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.297430] env[63371]: DEBUG oslo_vmware.api [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Waiting for the task: (returnval){ [ 1321.297430] env[63371]: value = "task-1773531" [ 1321.297430] env[63371]: _type = "Task" [ 1321.297430] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.307073] env[63371]: DEBUG oslo_vmware.api [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Task: {'id': task-1773531, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.353624] env[63371]: DEBUG oslo_concurrency.lockutils [None req-861cb08f-139c-45e5-9ef0-00176651296f tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "ca202079-2eae-441e-80f6-e403497e137d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.388s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.354483] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37631511-f400-495d-90e3-c34dd5440d8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.365316] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd62b276-f7be-4cc0-a687-25ff759144f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.400422] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "fc0715a1-a056-4a1b-a86e-959680effc97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.072s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1321.403602] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc57257f-c049-4d3d-b2c0-fb8d50a253f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.413416] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5226efc-1bd5-4ab7-a335-4c8969954ed9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.442883] env[63371]: DEBUG nova.compute.provider_tree [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1321.496574] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b5129a-7d8b-46e5-30e8-b73a4287f036, 'name': SearchDatastore_Task, 'duration_secs': 0.013008} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.499500] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773526, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.499796] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-711f7c5a-997f-4954-8437-f99429b4b5a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.510313] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1321.510313] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5247eff4-c276-6012-6389-87fe72458061" [ 1321.510313] env[63371]: _type = "Task" [ 1321.510313] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.520728] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5247eff4-c276-6012-6389-87fe72458061, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.750757] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773530, 'name': CreateVM_Task, 'duration_secs': 0.416363} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.751060] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1321.751726] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1321.751985] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.752336] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1321.752614] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-065e399e-6e58-447d-8356-a41965f39051 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.757366] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting 
for the task: (returnval){ [ 1321.757366] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524241e8-7066-5890-6e54-62c21f744114" [ 1321.757366] env[63371]: _type = "Task" [ 1321.757366] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1321.767449] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524241e8-7066-5890-6e54-62c21f744114, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1321.809077] env[63371]: DEBUG oslo_vmware.api [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Task: {'id': task-1773531, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248108} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.809576] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1321.809962] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1321.810392] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1321.810753] env[63371]: INFO nova.compute.manager [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1321.811170] env[63371]: DEBUG oslo.service.loopingcall [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1321.811520] env[63371]: DEBUG nova.compute.manager [-] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1321.811776] env[63371]: DEBUG nova.network.neutron [-] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1321.860249] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1321.910832] env[63371]: DEBUG nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1321.947504] env[63371]: DEBUG nova.scheduler.client.report [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1321.972969] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1321.978386] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "1924d3d2-cc88-4fd2-b509-8463da796658" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1321.978606] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "1924d3d2-cc88-4fd2-b509-8463da796658" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1321.988847] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773526, 'name': ReconfigVM_Task, 'duration_secs': 0.601484} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1321.993707] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Reconfigured VM instance instance-00000006 to attach disk [datastore1] d9523239-79d1-434f-977a-e1f0e358c82b/d9523239-79d1-434f-977a-e1f0e358c82b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1321.993707] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ac3b867-6cc1-436e-a7e3-2a7390d7f8e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.998750] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1321.998750] env[63371]: value = "task-1773532" [ 1321.998750] env[63371]: _type = "Task" [ 1321.998750] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.013685] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773532, 'name': Rename_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.015901] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1322.016223] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1322.016395] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1322.016580] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1322.016724] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1322.016927] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1322.017198] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1322.017339] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1322.017650] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1322.018055] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1322.018257] env[63371]: DEBUG nova.virt.hardware [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1322.022548] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df748729-8d12-458e-aaa2-1e88619c0702 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.036728] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89136a50-ec0d-4a80-88d4-2130a2c3999b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.041642] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5247eff4-c276-6012-6389-87fe72458061, 'name': SearchDatastore_Task, 'duration_secs': 0.01192} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.041899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1322.043686] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] ca53accc-a15f-4503-87e5-7cbf3e2c0b43/ca53accc-a15f-4503-87e5-7cbf3e2c0b43.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1322.043686] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1531be4e-cd78-4f38-b2ca-a64b4a472f18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.058557] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1322.058557] env[63371]: value = "task-1773533" [ 1322.058557] env[63371]: _type = "Task" [ 1322.058557] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.068860] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773533, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.276078] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524241e8-7066-5890-6e54-62c21f744114, 'name': SearchDatastore_Task, 'duration_secs': 0.035074} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.276644] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1322.276727] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1322.277034] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1322.277140] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1322.277318] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1322.277598] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eabd07d2-d810-4a81-aa8e-1f207c32d698 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.287584] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1322.287780] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1322.288824] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87c601eb-5e37-4fb4-ab26-f40428a05cf5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.295506] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1322.295506] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52e91813-47a5-646c-2b2f-ff92f30dc873" [ 1322.295506] env[63371]: _type = "Task" [ 1322.295506] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.305031] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e91813-47a5-646c-2b2f-ff92f30dc873, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.396404] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1322.445935] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1322.454492] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.048s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1322.455040] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1322.459269] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.360s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1322.460292] env[63371]: INFO nova.compute.claims [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1322.515444] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773532, 'name': Rename_Task, 'duration_secs': 0.213494} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.518036] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1322.518036] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-baccbaa9-8d3c-4ad5-a737-40f2d4f9c84d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.530904] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1322.530904] env[63371]: value = "task-1773535" [ 1322.530904] env[63371]: _type = "Task" [ 1322.530904] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.547617] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773535, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.574512] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773533, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.754213] env[63371]: DEBUG nova.network.neutron [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updated VIF entry in instance network info cache for port 9a4b63df-9697-47a1-81ad-c69476a80975. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1322.754768] env[63371]: DEBUG nova.network.neutron [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updating instance_info_cache with network_info: [{"id": "9a4b63df-9697-47a1-81ad-c69476a80975", "address": "fa:16:3e:ca:f3:37", "network": {"id": "5b9593c6-3e8e-4b0f-ad69-daf3e2419d2c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-90261722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e7f96aff7d240469616d256291f7081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a4b63df-96", "ovs_interfaceid": "9a4b63df-9697-47a1-81ad-c69476a80975", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.771697] env[63371]: DEBUG nova.network.neutron [-] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1322.811176] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e91813-47a5-646c-2b2f-ff92f30dc873, 'name': SearchDatastore_Task, 'duration_secs': 0.017467} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1322.812438] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0689f42a-ec50-47e7-bf7c-36c4a12c23fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1322.820438] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1322.820438] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cdd6ff-65bd-ef0e-91c4-d14b039510da" [ 1322.820438] env[63371]: _type = "Task" [ 1322.820438] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1322.832735] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cdd6ff-65bd-ef0e-91c4-d14b039510da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1322.965100] env[63371]: DEBUG nova.compute.utils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1322.973421] env[63371]: DEBUG nova.network.neutron [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Successfully created port: d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1322.977001] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1322.977001] env[63371]: DEBUG nova.network.neutron [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1323.047353] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773535, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.076216] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773533, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.758741} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.076778] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] ca53accc-a15f-4503-87e5-7cbf3e2c0b43/ca53accc-a15f-4503-87e5-7cbf3e2c0b43.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1323.076778] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1323.077169] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8203562-1cfd-4620-b210-d6601baa2413 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.086854] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1323.086854] env[63371]: value = "task-1773536" [ 1323.086854] env[63371]: _type = "Task" [ 1323.086854] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.107501] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773536, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.206869] env[63371]: DEBUG nova.policy [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3dec49b67cd49159192b5c2756fc2e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0f2fde472b14ab9a4d20947ca714191', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1323.259642] env[63371]: DEBUG oslo_concurrency.lockutils [req-d7e350e3-061e-48db-ad03-96f5e46437af req-70911646-fffe-4a7e-8ce4-c5e9331b6cc1 service nova] Releasing lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.277302] env[63371]: INFO nova.compute.manager [-] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Took 1.47 seconds to deallocate network for instance. 
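The CopyVirtualDisk_Task, ExtendVirtualDisk_Task and ReconfigVM_Task entries above all follow the same oslo.vmware pattern: a task-returning vSphere method is invoked through the API session, and wait_for_task() then polls the task object until vCenter reports success or failure (the repeated "Waiting for the task" / "progress is N%" lines come from that polling loop). A minimal sketch of the pattern, with placeholder endpoint, credentials and datastore paths, and with datacenter references omitted for brevity — illustrative only, not Nova's actual vm_util code:

    from oslo_vmware import api

    # Placeholder vCenter endpoint and credentials; a real deployment reads these from nova.conf.
    session = api.VMwareAPISession(
        'vc1.example.test', 'stack', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Copy the cached image vmdk into the instance directory, then extend the root disk.
    disk_mgr = session.vim.service_content.virtualDiskManager
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName='[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
        destName='[datastore1] <instance-uuid>/<instance-uuid>.vmdk')
    session.wait_for_task(copy_task)   # produces the "progress is N%" poll lines seen above

    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name='[datastore1] <instance-uuid>/<instance-uuid>.vmdk',
        newCapacityKb=1048576, eagerZero=False)   # 1048576 KB = 1 GiB root disk, as in the log
    session.wait_for_task(extend_task)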
[ 1323.334026] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cdd6ff-65bd-ef0e-91c4-d14b039510da, 'name': SearchDatastore_Task, 'duration_secs': 0.011107} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.334026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1323.334026] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 47c1c242-d190-4523-8033-307c5a9b7535/47c1c242-d190-4523-8033-307c5a9b7535.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1323.336079] env[63371]: DEBUG nova.compute.manager [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Received event network-vif-plugged-a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1323.336577] env[63371]: DEBUG oslo_concurrency.lockutils [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] Acquiring lock "47c1c242-d190-4523-8033-307c5a9b7535-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.337121] env[63371]: DEBUG oslo_concurrency.lockutils [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] Lock "47c1c242-d190-4523-8033-307c5a9b7535-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.337396] env[63371]: DEBUG oslo_concurrency.lockutils [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] Lock "47c1c242-d190-4523-8033-307c5a9b7535-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1323.337694] env[63371]: DEBUG nova.compute.manager [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] No waiting events found dispatching network-vif-plugged-a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1323.337963] env[63371]: WARNING nova.compute.manager [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed 
service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Received unexpected event network-vif-plugged-a2807b8c-5895-474a-9c75-58bd21982409 for instance with vm_state building and task_state spawning. [ 1323.338569] env[63371]: DEBUG nova.compute.manager [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Received event network-changed-a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1323.339376] env[63371]: DEBUG nova.compute.manager [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Refreshing instance network info cache due to event network-changed-a2807b8c-5895-474a-9c75-58bd21982409. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1323.339376] env[63371]: DEBUG oslo_concurrency.lockutils [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] Acquiring lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.339902] env[63371]: DEBUG oslo_concurrency.lockutils [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] Acquired lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.340198] env[63371]: DEBUG nova.network.neutron [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Refreshing network info cache for port a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1323.343017] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60988934-5b4b-4b08-82c7-abde65c21d4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.351592] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1323.351592] env[63371]: value = "task-1773537" [ 1323.351592] env[63371]: _type = "Task" [ 1323.351592] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.363289] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773537, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.474959] env[63371]: DEBUG nova.network.neutron [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Successfully updated port: 3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1323.476983] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1323.547057] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773535, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.604251] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773536, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.200019} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.611108] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1323.611492] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd1fc60-54f4-4915-91c3-16c49efcdcee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.641278] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] ca53accc-a15f-4503-87e5-7cbf3e2c0b43/ca53accc-a15f-4503-87e5-7cbf3e2c0b43.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1323.643964] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee86346d-986b-4e5c-8188-835f3a2eb012 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.674942] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1323.674942] env[63371]: value = "task-1773538" [ 1323.674942] env[63371]: _type = "Task" [ 1323.674942] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.695529] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773538, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.741442] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.741873] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1323.788701] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1323.873631] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773537, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.931306] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434070a5-0654-4deb-bca1-18adf2d16e59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.944380] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c3386f-676a-4f6b-bb64-dce8846172f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.983883] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1323.984057] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.984195] env[63371]: DEBUG nova.network.neutron [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1323.987052] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13f024c-020c-45f6-9f91-93db96d92530 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.000860] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d286aba1-6d15-4590-b868-b682a880cf78 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.022168] env[63371]: DEBUG nova.compute.provider_tree [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1324.045576] env[63371]: DEBUG oslo_vmware.api [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773535, 'name': PowerOnVM_Task, 'duration_secs': 1.181002} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.045860] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1324.046079] env[63371]: INFO nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Took 12.34 seconds to spawn the instance on the hypervisor. [ 1324.046263] env[63371]: DEBUG nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1324.047088] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59a3b1b-053d-43db-9dd8-55f351e32061 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.184576] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773538, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.214631] env[63371]: DEBUG nova.network.neutron [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Successfully created port: 17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1324.372493] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773537, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.878983} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.372858] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 47c1c242-d190-4523-8033-307c5a9b7535/47c1c242-d190-4523-8033-307c5a9b7535.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1324.373114] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1324.373509] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f61ca92c-979c-4691-ac81-8b3ae4e94d61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.380601] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1324.380601] env[63371]: value = "task-1773539" [ 1324.380601] env[63371]: _type = "Task" [ 1324.380601] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.389371] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773539, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.503551] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1324.527754] env[63371]: DEBUG nova.scheduler.client.report [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1324.537768] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1324.538529] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1324.538529] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1324.538529] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1324.538745] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1324.538745] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1324.538965] env[63371]: DEBUG 
nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1324.540036] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1324.540270] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1324.540467] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1324.540669] env[63371]: DEBUG nova.virt.hardware [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1324.541612] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7753c05c-58cf-40e7-896f-efc0416dc3cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.545962] env[63371]: DEBUG nova.network.neutron [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1324.559461] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6c300d-0a35-49e7-9ff0-03a037138ad1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.586035] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.586286] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.586508] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.586676] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.586833] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.591342] env[63371]: INFO nova.compute.manager [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Terminating instance [ 1324.592682] env[63371]: INFO nova.compute.manager [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Took 25.20 seconds to build instance. [ 1324.595308] env[63371]: DEBUG nova.compute.manager [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1324.595531] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1324.599104] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5174f20-05fc-4ab4-bb6b-4e78716d234c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.608486] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1324.609176] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49ede7d8-11a6-4806-9bbd-c91a9221565f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.616929] env[63371]: DEBUG oslo_vmware.api [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1324.616929] env[63371]: value = "task-1773540" [ 1324.616929] env[63371]: _type = "Task" [ 1324.616929] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.626080] env[63371]: DEBUG oslo_vmware.api [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773540, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.686545] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773538, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.745307] env[63371]: DEBUG nova.network.neutron [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updated VIF entry in instance network info cache for port a2807b8c-5895-474a-9c75-58bd21982409. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1324.745708] env[63371]: DEBUG nova.network.neutron [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updating instance_info_cache with network_info: [{"id": "a2807b8c-5895-474a-9c75-58bd21982409", "address": "fa:16:3e:c2:68:79", "network": {"id": "6b2f7559-22c6-4657-b126-18f7ace337d5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1011247410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c99d37d52edb40f99efb471da50f5845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2807b8c-58", "ovs_interfaceid": "a2807b8c-5895-474a-9c75-58bd21982409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.890915] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773539, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073033} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.893853] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1324.893853] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd42433-4a47-4bde-8a1c-857f522624a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.898169] env[63371]: DEBUG nova.network.neutron [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance_info_cache with network_info: [{"id": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "address": "fa:16:3e:39:29:ca", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eac2e62-a1", "ovs_interfaceid": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.920243] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 47c1c242-d190-4523-8033-307c5a9b7535/47c1c242-d190-4523-8033-307c5a9b7535.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1324.921313] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1324.921619] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Instance network_info: |[{"id": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "address": "fa:16:3e:39:29:ca", "network": {"id": 
"8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eac2e62-a1", "ovs_interfaceid": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1324.921865] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ceca282d-c3d6-4733-a1c6-0e9d32919382 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.941642] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:29:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1324.949480] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Creating folder: Project (c76a64c712ca4aa98c19600ef0469855). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1324.950235] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-705c0b29-324c-420d-aebc-0bd636dad1fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.959102] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1324.959102] env[63371]: value = "task-1773541" [ 1324.959102] env[63371]: _type = "Task" [ 1324.959102] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.964102] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Created folder: Project (c76a64c712ca4aa98c19600ef0469855) in parent group-v368199. 
[ 1324.964102] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Creating folder: Instances. Parent ref: group-v368226. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1324.964102] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09496150-cd8c-4c2d-95a3-73e77d489a27 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.971844] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773541, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.972812] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Created folder: Instances in parent group-v368226. [ 1324.973087] env[63371]: DEBUG oslo.service.loopingcall [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1324.973287] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1324.973418] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71b04bdc-0c19-4634-81aa-a09d166e8405 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.995019] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1324.995019] env[63371]: value = "task-1773544" [ 1324.995019] env[63371]: _type = "Task" [ 1324.995019] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.004459] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773544, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.032775] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.033547] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1325.037575] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.604s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.040653] env[63371]: INFO nova.compute.claims [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1325.090597] env[63371]: DEBUG nova.compute.manager [req-ec4c8eca-3d79-4f66-b7ac-c3803439d849 req-1ec8a08e-d0b0-4aaa-8293-251ff9be0c69 service nova] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Received event network-vif-deleted-d2c4ae08-b10f-4881-8089-d2c46693937b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1325.102817] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d44143f5-e7dd-43cd-a12e-8cae027913ff tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "d9523239-79d1-434f-977a-e1f0e358c82b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.722s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1325.132464] env[63371]: DEBUG oslo_vmware.api [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773540, 'name': PowerOffVM_Task, 'duration_secs': 0.19529} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.132728] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1325.132909] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1325.133189] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5419c0ca-8043-488e-8572-bb180bf2a8b9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.183496] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773538, 'name': ReconfigVM_Task, 'duration_secs': 1.180942} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.183786] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Reconfigured VM instance instance-00000007 to attach disk [datastore1] ca53accc-a15f-4503-87e5-7cbf3e2c0b43/ca53accc-a15f-4503-87e5-7cbf3e2c0b43.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1325.184443] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2427cd17-3a7f-42b9-8bd1-e88857b0b422 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.194300] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1325.194300] env[63371]: value = "task-1773546" [ 1325.194300] env[63371]: _type = "Task" [ 1325.194300] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.205088] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773546, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.247497] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1325.247804] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1325.248088] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Deleting the datastore file [datastore1] 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1325.248462] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0aac21a0-476d-4de6-9a5c-8c7bd5f44aaa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.252930] env[63371]: DEBUG oslo_concurrency.lockutils [req-807e6b1f-a2d4-4b90-8602-9afd4588dc16 req-70087e0d-4bc5-44d0-9618-2fc7dd4a1aed service nova] Releasing lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1325.259504] env[63371]: DEBUG oslo_vmware.api [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd 
tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for the task: (returnval){ [ 1325.259504] env[63371]: value = "task-1773547" [ 1325.259504] env[63371]: _type = "Task" [ 1325.259504] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.277238] env[63371]: DEBUG oslo_vmware.api [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773547, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.473549] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773541, 'name': ReconfigVM_Task, 'duration_secs': 0.284713} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.473549] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 47c1c242-d190-4523-8033-307c5a9b7535/47c1c242-d190-4523-8033-307c5a9b7535.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1325.473771] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e19b5131-07d8-4978-9471-835168003719 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.481104] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1325.481104] env[63371]: value = "task-1773548" [ 1325.481104] env[63371]: _type = "Task" [ 1325.481104] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.489911] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773548, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.507338] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773544, 'name': CreateVM_Task, 'duration_secs': 0.434609} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.507724] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1325.508533] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1325.508878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1325.509328] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1325.510348] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a15459b-32a6-4ad8-be40-4e64331bc6eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.517284] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1325.517284] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5282bf79-ac01-453f-116d-726e6ec68cab" [ 1325.517284] env[63371]: _type = "Task" [ 1325.517284] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.530154] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5282bf79-ac01-453f-116d-726e6ec68cab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.546419] env[63371]: DEBUG nova.compute.utils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1325.551563] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1325.551563] env[63371]: DEBUG nova.network.neutron [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1325.610035] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1325.678040] env[63371]: DEBUG nova.policy [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1a8a6cea5f2a4400b8f3bb15101ae129', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7a9101ae72864e0b8af6c598153ff40e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1325.707224] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773546, 'name': Rename_Task, 'duration_secs': 0.226197} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.707224] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1325.707224] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e8cceb4-f559-43b6-97af-0f8d3ff3a35c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.717145] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1325.717145] env[63371]: value = "task-1773549" [ 1325.717145] env[63371]: _type = "Task" [ 1325.717145] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.723516] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773549, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.754553] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1325.757140] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.776582] env[63371]: DEBUG oslo_vmware.api [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Task: {'id': task-1773547, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.305787} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.776582] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1325.776582] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1325.776582] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1325.776582] env[63371]: INFO nova.compute.manager [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1325.776818] env[63371]: DEBUG oslo.service.loopingcall [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1325.778676] env[63371]: DEBUG nova.compute.manager [-] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1325.778676] env[63371]: DEBUG nova.network.neutron [-] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1325.944837] env[63371]: DEBUG nova.network.neutron [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Successfully updated port: d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1325.991437] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773548, 'name': Rename_Task, 'duration_secs': 0.154979} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.992117] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1325.992383] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a47560e9-d27f-45e3-a409-d15098a4c3ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.999662] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1325.999662] env[63371]: value = "task-1773550" [ 1325.999662] env[63371]: _type = "Task" [ 1325.999662] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.008357] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773550, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.029688] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5282bf79-ac01-453f-116d-726e6ec68cab, 'name': SearchDatastore_Task, 'duration_secs': 0.012046} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.030080] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1326.030468] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1326.030579] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.030852] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.031072] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1326.031373] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de629420-51e7-4175-8d7a-1f2fa4d3dc8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.039980] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1326.040151] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1326.041022] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a388e72-8c9e-48a6-8c6e-4530bc6dd2f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.046800] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1326.046800] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524a0f14-99ba-6d18-48d5-2c64b84a5c40" [ 1326.046800] env[63371]: _type = "Task" [ 1326.046800] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.050994] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1326.059644] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524a0f14-99ba-6d18-48d5-2c64b84a5c40, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.136254] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.226944] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773549, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.290492] env[63371]: DEBUG nova.network.neutron [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Successfully created port: 358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1326.386349] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49ec11a3-70af-49aa-bff0-c7e4d931b8a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.397034] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ef1c97-6697-4dc5-b726-ae7c79e7ce7e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.434422] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26504e33-830d-41fb-a202-d2bf7784122d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.443123] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db23a582-ee50-483b-939a-9ec36f642482 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.447939] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "refresh_cache-201a2d1e-9e2c-4c07-92be-200408874ad4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1326.448104] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired lock "refresh_cache-201a2d1e-9e2c-4c07-92be-200408874ad4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1326.448254] env[63371]: DEBUG nova.network.neutron [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1326.460578] env[63371]: DEBUG nova.compute.provider_tree [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1326.513789] env[63371]: DEBUG oslo_vmware.api [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1773550, 'name': PowerOnVM_Task, 'duration_secs': 0.477552} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.514196] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1326.514449] env[63371]: INFO nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Took 10.04 seconds to spawn the instance on the hypervisor. [ 1326.514792] env[63371]: DEBUG nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1326.519386] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d35381fc-6f99-4866-a084-9563aa4860b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.562277] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524a0f14-99ba-6d18-48d5-2c64b84a5c40, 'name': SearchDatastore_Task, 'duration_secs': 0.008167} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.562277] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c969cfe-57e9-4f86-a4b7-619fefb445d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.573512] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1326.573512] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52942d75-4b8f-948e-8693-71b37de547fc" [ 1326.573512] env[63371]: _type = "Task" [ 1326.573512] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.587470] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52942d75-4b8f-948e-8693-71b37de547fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.729336] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773549, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.845322] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1326.845746] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1326.964630] env[63371]: DEBUG nova.scheduler.client.report [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1327.045693] env[63371]: INFO nova.compute.manager [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Took 23.89 seconds to build instance. [ 1327.051477] env[63371]: DEBUG nova.network.neutron [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1327.071908] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1327.096497] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52942d75-4b8f-948e-8693-71b37de547fc, 'name': SearchDatastore_Task, 'duration_secs': 0.01185} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.096497] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1327.096497] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed/f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1327.096497] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-343eb3ce-b437-4f14-b18a-8b739fd39796 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.111459] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1327.111689] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1327.111841] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1327.113281] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1327.113281] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 
tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1327.113281] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1327.113281] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1327.113281] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1327.113489] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1327.113587] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1327.113760] env[63371]: DEBUG nova.virt.hardware [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1327.114930] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb369334-830e-46de-a122-c62b21359646 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.121550] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1327.121550] env[63371]: value = "task-1773552" [ 1327.121550] env[63371]: _type = "Task" [ 1327.121550] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1327.130936] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974f4116-27a9-4e53-acb6-30014bba2ff1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.145214] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773552, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.226805] env[63371]: DEBUG oslo_vmware.api [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773549, 'name': PowerOnVM_Task, 'duration_secs': 1.142406} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1327.227579] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1327.227579] env[63371]: INFO nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Took 13.13 seconds to spawn the instance on the hypervisor. [ 1327.227579] env[63371]: DEBUG nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1327.228339] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56064ba0-f53d-423b-9ddb-f4eabe832ee8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1327.404561] env[63371]: DEBUG nova.network.neutron [-] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.473088] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.473640] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1327.477794] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.082s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.483099] env[63371]: INFO nova.compute.claims [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1327.548707] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8f2711e9-2e75-401f-90df-5e5d8d2162cb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "47c1c242-d190-4523-8033-307c5a9b7535" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.407s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.602808] env[63371]: DEBUG nova.network.neutron [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Successfully updated port: 17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1327.634343] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773552, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.747331] env[63371]: DEBUG nova.compute.manager [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Received event network-vif-plugged-3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1327.747331] env[63371]: DEBUG oslo_concurrency.lockutils [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] Acquiring lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1327.747417] env[63371]: DEBUG oslo_concurrency.lockutils [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1327.747599] env[63371]: DEBUG oslo_concurrency.lockutils [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.747677] env[63371]: DEBUG nova.compute.manager [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] No waiting events found dispatching network-vif-plugged-3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1327.747878] env[63371]: WARNING nova.compute.manager [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Received unexpected event network-vif-plugged-3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 for instance with vm_state building and task_state spawning. [ 1327.749035] env[63371]: DEBUG nova.compute.manager [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Received event network-changed-3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1327.749035] env[63371]: DEBUG nova.compute.manager [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Refreshing instance network info cache due to event network-changed-3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1327.749035] env[63371]: DEBUG oslo_concurrency.lockutils [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] Acquiring lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1327.749035] env[63371]: DEBUG oslo_concurrency.lockutils [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] Acquired lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1327.749296] env[63371]: DEBUG nova.network.neutron [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Refreshing network info cache for port 3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1327.754704] env[63371]: INFO nova.compute.manager [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Took 25.45 seconds to build instance. [ 1327.782258] env[63371]: DEBUG nova.network.neutron [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Updating instance_info_cache with network_info: [{"id": "d1b325d0-b864-44be-8fe4-b923489752d0", "address": "fa:16:3e:ff:96:d6", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1b325d0-b8", "ovs_interfaceid": "d1b325d0-b864-44be-8fe4-b923489752d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1327.907264] env[63371]: INFO nova.compute.manager [-] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Took 2.13 seconds to deallocate network for instance. 
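The wait_for_task/_poll_task entries throughout this stretch ("Waiting for the task ... to complete", "progress is N%", "completed successfully") are the visible side of a poll-until-done loop driven by oslo.service looping calls, the same mechanism the oslo.service.loopingcall entries reference. A rough sketch of that pattern, using a hypothetical FakeTask stand-in rather than a real vSphere task object, and not claiming to reproduce the oslo.vmware implementation:

    from oslo_service import loopingcall


    class FakeTask:
        """Hypothetical task that reports completion after a few polls."""

        def __init__(self):
            self.progress = 0
            self.state = "running"

        def refresh(self):
            self.progress = min(100, self.progress + 33)
            if self.progress >= 100:
                self.state = "success"


    def _poll(task):
        # Mirrors the "progress is N%" lines: re-read the task each tick and
        # stop the looping call once it reports success.
        task.refresh()
        print("progress is %d%%" % task.progress)
        if task.state == "success":
            raise loopingcall.LoopingCallDone(task)


    task = FakeTask()
    timer = loopingcall.FixedIntervalLoopingCall(_poll, task)
    result = timer.start(interval=0.5).wait()  # blocks until LoopingCallDone is raised
    print("completed successfully:", result.state)
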
[ 1327.988279] env[63371]: DEBUG nova.compute.utils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1327.994201] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1327.994201] env[63371]: DEBUG nova.network.neutron [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1328.055539] env[63371]: DEBUG nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1328.092088] env[63371]: DEBUG nova.policy [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '145668abb3514b8ea11c4fc6cf13d2cd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f0a4db7d709461ca32a5dc0ebabdf31', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1328.108232] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.108232] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.108232] env[63371]: DEBUG nova.network.neutron [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1328.136270] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773552, 'name': CopyVirtualDisk_Task, 
'duration_secs': 0.852265} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.142738] env[63371]: DEBUG nova.network.neutron [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Successfully updated port: 358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1328.142738] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed/f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1328.142738] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1328.142738] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25a56520-1ad5-454e-87f4-f750c02204b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.149433] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1328.149433] env[63371]: value = "task-1773553" [ 1328.149433] env[63371]: _type = "Task" [ 1328.149433] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.161524] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773553, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.203707] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "ca202079-2eae-441e-80f6-e403497e137d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.204065] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "ca202079-2eae-441e-80f6-e403497e137d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.204406] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "ca202079-2eae-441e-80f6-e403497e137d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.204653] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "ca202079-2eae-441e-80f6-e403497e137d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.204882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "ca202079-2eae-441e-80f6-e403497e137d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.211725] env[63371]: INFO nova.compute.manager [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Terminating instance [ 1328.216398] env[63371]: DEBUG nova.compute.manager [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1328.216672] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1328.216992] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "e912c210-3ae1-47ce-b9cd-afebf6195606" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.217299] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e912c210-3ae1-47ce-b9cd-afebf6195606" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1328.218212] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8391b27-07e7-4e66-b56e-213d95bab83b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.226847] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1328.227452] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36b46288-b5c6-4d83-972d-877733323477 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.234649] env[63371]: DEBUG oslo_vmware.api [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1328.234649] env[63371]: value = "task-1773554" [ 1328.234649] env[63371]: _type = "Task" [ 1328.234649] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.244731] env[63371]: DEBUG oslo_vmware.api [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773554, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.258637] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c36d7d40-b51b-49ff-91c1-e90a6528c183 tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.971s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1328.286332] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Releasing lock "refresh_cache-201a2d1e-9e2c-4c07-92be-200408874ad4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1328.287034] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Instance network_info: |[{"id": "d1b325d0-b864-44be-8fe4-b923489752d0", "address": "fa:16:3e:ff:96:d6", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1b325d0-b8", "ovs_interfaceid": "d1b325d0-b864-44be-8fe4-b923489752d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1328.287583] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:96:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1b325d0-b864-44be-8fe4-b923489752d0', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1328.296711] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Creating folder: Project (c9d19f4772ff46d3b3024851822cf833). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1328.297438] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a69aaba-9d49-47ee-b24c-a5f5e2d45e2d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.308859] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Created folder: Project (c9d19f4772ff46d3b3024851822cf833) in parent group-v368199. [ 1328.309086] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Creating folder: Instances. Parent ref: group-v368229. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1328.309362] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e00d421e-2d01-4b64-9a78-9462845fb5fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.318204] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Created folder: Instances in parent group-v368229. [ 1328.318447] env[63371]: DEBUG oslo.service.loopingcall [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1328.318632] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1328.318835] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-021039ec-0438-46b1-bd90-a1ef511642d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.342289] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1328.342289] env[63371]: value = "task-1773557" [ 1328.342289] env[63371]: _type = "Task" [ 1328.342289] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.353979] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773557, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.415420] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.494023] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1328.597657] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1328.603695] env[63371]: DEBUG nova.network.neutron [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Successfully created port: f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1328.627127] env[63371]: DEBUG nova.network.neutron [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updated VIF entry in instance network info cache for port 3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1328.627574] env[63371]: DEBUG nova.network.neutron [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance_info_cache with network_info: [{"id": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "address": "fa:16:3e:39:29:ca", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eac2e62-a1", "ovs_interfaceid": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1328.640841] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "refresh_cache-4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1328.641023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquired lock "refresh_cache-4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1328.641240] env[63371]: DEBUG nova.network.neutron [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1328.668567] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773553, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.219062} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.670643] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1328.671903] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a473eab0-cd98-4d8b-a112-17f04d5bd333 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.698091] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed/f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1328.699143] env[63371]: DEBUG nova.network.neutron [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1328.704448] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4039c97e-948f-4b4f-8142-59bbc6794ab6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.723941] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1328.723941] env[63371]: value = "task-1773559" [ 1328.723941] env[63371]: _type = "Task" [ 1328.723941] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.733209] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773559, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.746352] env[63371]: DEBUG oslo_vmware.api [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773554, 'name': PowerOffVM_Task, 'duration_secs': 0.386805} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.749701] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1328.749992] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1328.753849] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2cc40f24-b90f-42a0-94e2-056e05ca6a8e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.761594] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1328.858572] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773557, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.859110] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1328.859461] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1328.859745] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Deleting the datastore file [datastore1] ca202079-2eae-441e-80f6-e403497e137d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1328.860116] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ac0cf98-9d0e-4c9b-b5ef-d74fb7d28d82 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.868025] env[63371]: DEBUG oslo_vmware.api [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for the task: (returnval){ [ 1328.868025] env[63371]: value = "task-1773561" [ 1328.868025] env[63371]: _type = "Task" [ 1328.868025] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.880013] env[63371]: DEBUG oslo_vmware.api [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773561, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.964410] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08491401-1548-4e5f-b96d-5f636de10dec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.973331] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f26135a-8f68-47ea-85f9-fdce975786f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.014976] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff430d88-5bd2-4914-8daa-0e80c23ac2ff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.023384] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7d9de1-7335-4c73-b3e2-e83964b34bea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.038320] env[63371]: DEBUG nova.compute.provider_tree [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1329.130234] env[63371]: DEBUG oslo_concurrency.lockutils [req-6852258c-b318-4f00-9e22-c5cf91ea7c26 req-d6ea33fd-628d-4089-a8be-603779fe899f service nova] Releasing lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.191662] env[63371]: DEBUG nova.network.neutron [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1329.238616] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773559, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.256219] env[63371]: DEBUG nova.network.neutron [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Updating instance_info_cache with network_info: [{"id": "17aee217-e9ac-4d12-8821-73130231a498", "address": "fa:16:3e:02:c3:2a", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17aee217-e9", "ovs_interfaceid": "17aee217-e9ac-4d12-8821-73130231a498", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.294921] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.362136] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773557, 'name': CreateVM_Task, 'duration_secs': 0.566007} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.362136] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1329.362136] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.362136] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.362136] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1329.362430] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b77a8c6-e50b-4890-989a-15e7a803da01 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.366546] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1329.366546] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae6ff4-7005-812d-55a5-c655621b0f43" [ 1329.366546] env[63371]: _type = "Task" [ 1329.366546] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.388886] env[63371]: DEBUG oslo_vmware.api [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Task: {'id': task-1773561, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268186} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.389192] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae6ff4-7005-812d-55a5-c655621b0f43, 'name': SearchDatastore_Task, 'duration_secs': 0.010563} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.391921] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1329.392145] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1329.392361] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1329.392491] env[63371]: INFO nova.compute.manager [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] [instance: ca202079-2eae-441e-80f6-e403497e137d] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1329.394619] env[63371]: DEBUG oslo.service.loopingcall [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1329.394619] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.394619] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1329.394619] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1329.394970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1329.394970] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1329.394970] env[63371]: DEBUG nova.compute.manager [-] [instance: ca202079-2eae-441e-80f6-e403497e137d] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1329.394970] env[63371]: DEBUG nova.network.neutron [-] [instance: ca202079-2eae-441e-80f6-e403497e137d] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1329.396637] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-65b98fd6-2a54-4723-9565-a67cdd1c53e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.406723] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1329.407029] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1329.408102] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cc1f6f4-7c5e-4158-b0f9-2af7f9c4dff6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.416877] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1329.416877] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d250ef-7a5f-c765-f39e-9bd40d692554" [ 1329.416877] env[63371]: _type = "Task" [ 1329.416877] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.426410] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d250ef-7a5f-c765-f39e-9bd40d692554, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.471329] env[63371]: DEBUG nova.network.neutron [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Updating instance_info_cache with network_info: [{"id": "358a8d7d-459f-49a9-b3c7-0cf811dd7e54", "address": "fa:16:3e:88:50:21", "network": {"id": "36f3a290-9a15-4a89-ad59-f55babc49d13", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-72446873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a9101ae72864e0b8af6c598153ff40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap358a8d7d-45", "ovs_interfaceid": "358a8d7d-459f-49a9-b3c7-0cf811dd7e54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1329.516179] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1329.542933] env[63371]: DEBUG nova.scheduler.client.report [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1329.552118] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1329.552537] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1329.553194] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1329.553665] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1329.554537] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1329.554537] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1329.555572] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1329.555942] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1329.557793] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1329.558121] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1329.558394] env[63371]: DEBUG nova.virt.hardware [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1329.562874] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46efe451-7712-4cfd-8dde-981c11434b7a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.571924] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196d9a48-e019-4b77-8bb6-1303ac8be38f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.739733] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773559, 'name': ReconfigVM_Task, 'duration_secs': 0.528636} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.739993] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Reconfigured VM instance instance-00000009 to attach disk [datastore1] f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed/f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1329.745061] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b608e1f8-4119-4629-b6cf-b6f387ee59e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.753020] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1329.753020] env[63371]: value = "task-1773562" [ 1329.753020] env[63371]: _type = "Task" [ 1329.753020] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.764349] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.764349] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Instance network_info: |[{"id": "17aee217-e9ac-4d12-8821-73130231a498", "address": "fa:16:3e:02:c3:2a", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17aee217-e9", "ovs_interfaceid": "17aee217-e9ac-4d12-8821-73130231a498", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1329.764534] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773562, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.766320] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:c3:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '17aee217-e9ac-4d12-8821-73130231a498', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1329.774272] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Creating folder: Project (c0f2fde472b14ab9a4d20947ca714191). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1329.774919] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e110ab0e-8b9a-4952-a84d-89aa7b3ee23b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.785069] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Created folder: Project (c0f2fde472b14ab9a4d20947ca714191) in parent group-v368199. [ 1329.785319] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Creating folder: Instances. Parent ref: group-v368232. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1329.785829] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70d2aca3-9cab-4d51-9a57-266178ed1a30 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.798852] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Created folder: Instances in parent group-v368232. [ 1329.799251] env[63371]: DEBUG oslo.service.loopingcall [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1329.800280] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1329.800280] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80cd7cd6-c892-4ec1-9cf3-74623d943b1e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.822957] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1329.822957] env[63371]: value = "task-1773565" [ 1329.822957] env[63371]: _type = "Task" [ 1329.822957] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.835417] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773565, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.929305] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d250ef-7a5f-c765-f39e-9bd40d692554, 'name': SearchDatastore_Task, 'duration_secs': 0.011607} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.930275] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bb5e92e-a435-46a1-b82c-aaeaa4b71038 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.936398] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1329.936398] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52430c1a-19ae-e766-7743-4d28e2cae33d" [ 1329.936398] env[63371]: _type = "Task" [ 1329.936398] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1329.949496] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1329.949496] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1329.954342] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52430c1a-19ae-e766-7743-4d28e2cae33d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.974586] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Releasing lock "refresh_cache-4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1329.974873] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Instance network_info: |[{"id": "358a8d7d-459f-49a9-b3c7-0cf811dd7e54", "address": "fa:16:3e:88:50:21", "network": {"id": "36f3a290-9a15-4a89-ad59-f55babc49d13", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-72446873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a9101ae72864e0b8af6c598153ff40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap358a8d7d-45", "ovs_interfaceid": "358a8d7d-459f-49a9-b3c7-0cf811dd7e54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1329.975231] env[63371]: DEBUG 
nova.virt.vmwareapi.vmops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:50:21', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e30245c5-78f5-48e6-b504-c6c21f5a9b45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '358a8d7d-459f-49a9-b3c7-0cf811dd7e54', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1329.987513] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Creating folder: Project (7a9101ae72864e0b8af6c598153ff40e). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1329.988265] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60425f07-40cb-4fea-9917-cda54c4695c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.999631] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Created folder: Project (7a9101ae72864e0b8af6c598153ff40e) in parent group-v368199. [ 1329.999631] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Creating folder: Instances. Parent ref: group-v368235. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1329.999631] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dacf8ebe-09a9-4ec8-8e75-fb8a2fabe430 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.009295] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Created folder: Instances in parent group-v368235. [ 1330.009534] env[63371]: DEBUG oslo.service.loopingcall [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1330.009730] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1330.009951] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d74a341-7cf0-4d73-a6b9-e5b18c2920d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.031892] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1330.031892] env[63371]: value = "task-1773568" [ 1330.031892] env[63371]: _type = "Task" [ 1330.031892] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.044529] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773568, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.068084] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.068084] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1330.069679] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.626s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.071791] env[63371]: INFO nova.compute.claims [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1330.177407] env[63371]: DEBUG nova.compute.manager [req-79b7ef65-4733-4ebb-bd43-7a9b6839018c req-1dda6a52-dc81-4941-9735-e12e6352e4e3 service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Received event network-vif-plugged-358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1330.177407] env[63371]: DEBUG oslo_concurrency.lockutils [req-79b7ef65-4733-4ebb-bd43-7a9b6839018c req-1dda6a52-dc81-4941-9735-e12e6352e4e3 service nova] Acquiring lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.177407] env[63371]: DEBUG oslo_concurrency.lockutils [req-79b7ef65-4733-4ebb-bd43-7a9b6839018c req-1dda6a52-dc81-4941-9735-e12e6352e4e3 service nova] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.177407] env[63371]: DEBUG oslo_concurrency.lockutils [req-79b7ef65-4733-4ebb-bd43-7a9b6839018c req-1dda6a52-dc81-4941-9735-e12e6352e4e3 service nova] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1330.177407] env[63371]: DEBUG nova.compute.manager [req-79b7ef65-4733-4ebb-bd43-7a9b6839018c req-1dda6a52-dc81-4941-9735-e12e6352e4e3 service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] No waiting events found dispatching network-vif-plugged-358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1330.177739] env[63371]: WARNING nova.compute.manager [req-79b7ef65-4733-4ebb-bd43-7a9b6839018c req-1dda6a52-dc81-4941-9735-e12e6352e4e3 service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Received unexpected event network-vif-plugged-358a8d7d-459f-49a9-b3c7-0cf811dd7e54 for instance with vm_state building and task_state spawning. [ 1330.262032] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773562, 'name': Rename_Task, 'duration_secs': 0.181971} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.262938] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1330.262938] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-23bd1c64-a9fc-4497-8e5d-024d9487fefb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.270681] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1330.270681] env[63371]: value = "task-1773569" [ 1330.270681] env[63371]: _type = "Task" [ 1330.270681] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.276666] env[63371]: DEBUG nova.network.neutron [-] [instance: ca202079-2eae-441e-80f6-e403497e137d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1330.280144] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773569, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.335065] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773565, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.453577] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52430c1a-19ae-e766-7743-4d28e2cae33d, 'name': SearchDatastore_Task, 'duration_secs': 0.032585} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.453983] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1330.454388] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 201a2d1e-9e2c-4c07-92be-200408874ad4/201a2d1e-9e2c-4c07-92be-200408874ad4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1330.454752] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-15d1acc4-057c-4794-9a5c-d862938ac562 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.463598] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1330.463598] env[63371]: value = "task-1773570" [ 1330.463598] env[63371]: _type = "Task" [ 1330.463598] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.475460] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773570, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.543789] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773568, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.577215] env[63371]: DEBUG nova.compute.utils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1330.582859] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1330.582859] env[63371]: DEBUG nova.network.neutron [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1330.661336] env[63371]: DEBUG nova.policy [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b22696705ee840cb8ecd18e5abcec19c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5c5bf80b8e64c8795da4d79d6a89150', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1330.787814] env[63371]: INFO nova.compute.manager [-] [instance: ca202079-2eae-441e-80f6-e403497e137d] Took 1.39 seconds to deallocate network for instance. [ 1330.788360] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773569, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.840298] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773565, 'name': CreateVM_Task, 'duration_secs': 0.522558} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1330.840298] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1330.840298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1330.840298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1330.840298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1330.840298] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c38c8d36-8b57-46ab-8fc7-bf7f2afec7bd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1330.846940] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1330.846940] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52adfd61-3c72-f9c5-d3ac-b389e91c7fec" [ 1330.846940] env[63371]: _type = "Task" [ 1330.846940] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1330.852951] env[63371]: DEBUG nova.network.neutron [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Successfully updated port: f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1330.859955] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52adfd61-3c72-f9c5-d3ac-b389e91c7fec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1330.872439] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "af1281ba-c3be-43b4-a039-86d94bd9efe4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1330.872656] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1330.978168] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773570, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.046411] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773568, 'name': CreateVM_Task, 'duration_secs': 0.529883} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.046672] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1331.047396] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.082507] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1331.115307] env[63371]: DEBUG nova.network.neutron [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Successfully created port: 1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1331.152983] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.153425] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.289980] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773569, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.299728] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.356959] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52adfd61-3c72-f9c5-d3ac-b389e91c7fec, 'name': SearchDatastore_Task, 'duration_secs': 0.052465} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.360444] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.360653] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1331.360890] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.361043] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.361236] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1331.362533] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.362533] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquired lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.362533] env[63371]: DEBUG nova.network.neutron [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1331.363324] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.366762] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1331.366762] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dab59758-f263-485f-a913-82c81fa5940a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.366762] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb3035f6-e13f-4430-b935-54612cb68766 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.371524] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1331.371524] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae0a15-757b-9d23-9bc2-f7254279b4ff" [ 1331.371524] env[63371]: _type = "Task" [ 1331.371524] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.384380] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae0a15-757b-9d23-9bc2-f7254279b4ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.400881] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1331.401110] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1331.401934] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04c5ca38-7b1e-42fc-be3a-0ac18e195d00 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.408633] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1331.408633] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523ee850-e65e-5726-a010-d290ed6293bd" [ 1331.408633] env[63371]: _type = "Task" [ 1331.408633] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.421335] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523ee850-e65e-5726-a010-d290ed6293bd, 'name': SearchDatastore_Task, 'duration_secs': 0.010073} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.422206] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5001994c-4b0c-42e6-91aa-de80bc2e5fa0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.430610] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1331.430610] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ffb1e3-e2a9-968a-30c7-6eb71f441061" [ 1331.430610] env[63371]: _type = "Task" [ 1331.430610] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.438744] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ffb1e3-e2a9-968a-30c7-6eb71f441061, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.461933] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Received event network-vif-plugged-d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1331.462167] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquiring lock "201a2d1e-9e2c-4c07-92be-200408874ad4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.462690] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.463171] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.463725] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] No waiting events found dispatching network-vif-plugged-d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1331.463725] env[63371]: WARNING nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Received unexpected event network-vif-plugged-d1b325d0-b864-44be-8fe4-b923489752d0 for instance with vm_state building and task_state spawning. [ 1331.463882] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Received event network-changed-d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1331.464061] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Refreshing instance network info cache due to event network-changed-d1b325d0-b864-44be-8fe4-b923489752d0. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1331.464257] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquiring lock "refresh_cache-201a2d1e-9e2c-4c07-92be-200408874ad4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.464393] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquired lock "refresh_cache-201a2d1e-9e2c-4c07-92be-200408874ad4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.464546] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Refreshing network info cache for port d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1331.485819] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773570, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.614437} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.487615] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 201a2d1e-9e2c-4c07-92be-200408874ad4/201a2d1e-9e2c-4c07-92be-200408874ad4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1331.487847] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1331.488662] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2d85d73-9bed-497c-a8e4-a2ceb70e24e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.491960] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d42a6e34-612b-46ae-9923-ad706c010cf6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.500456] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e6201f-350b-4a1e-9d4c-a06697750355 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.504409] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1331.504409] env[63371]: value = 
"task-1773572" [ 1331.504409] env[63371]: _type = "Task" [ 1331.504409] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.538254] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b79e20c4-f4ef-4b09-937b-3410cf0f96b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.544431] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773572, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.549926] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5044b0a-f713-4b03-b2c4-251643125a7d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.565881] env[63371]: DEBUG nova.compute.provider_tree [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1331.770590] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "fc0715a1-a056-4a1b-a86e-959680effc97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.770847] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "fc0715a1-a056-4a1b-a86e-959680effc97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.771466] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "fc0715a1-a056-4a1b-a86e-959680effc97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1331.771466] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "fc0715a1-a056-4a1b-a86e-959680effc97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1331.771983] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock 
"fc0715a1-a056-4a1b-a86e-959680effc97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1331.774156] env[63371]: INFO nova.compute.manager [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Terminating instance [ 1331.776060] env[63371]: DEBUG nova.compute.manager [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1331.776181] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1331.781511] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251b3359-6efc-4826-b65a-3bcaea98a381 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.787807] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1331.791089] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13d6d1ec-2788-4943-bb00-ca932b5ebf6d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.792869] env[63371]: DEBUG oslo_vmware.api [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773569, 'name': PowerOnVM_Task, 'duration_secs': 1.147822} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.793280] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1331.793376] env[63371]: INFO nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Took 11.82 seconds to spawn the instance on the hypervisor. 
[ 1331.793556] env[63371]: DEBUG nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1331.794726] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fda0edb-2a9d-4718-a7d9-f04d0066a2d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.798595] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1331.798595] env[63371]: value = "task-1773573" [ 1331.798595] env[63371]: _type = "Task" [ 1331.798595] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.826187] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773573, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.885661] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae0a15-757b-9d23-9bc2-f7254279b4ff, 'name': SearchDatastore_Task, 'duration_secs': 0.036884} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.885971] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.886232] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1331.886479] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1331.922189] env[63371]: DEBUG nova.network.neutron [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1331.942867] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ffb1e3-e2a9-968a-30c7-6eb71f441061, 'name': SearchDatastore_Task, 'duration_secs': 0.033103} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1331.943187] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1331.943403] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/33cf00ea-3195-41cf-9b7a-a8e64496a122.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1331.944392] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1331.944392] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1331.944392] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df055bda-a9bb-4b03-8283-0de8983eff6c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.946219] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c4e06cd-e038-4ecd-82b4-c778509e1c23 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.954284] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1331.954284] env[63371]: value = "task-1773574" [ 1331.954284] env[63371]: _type = "Task" [ 1331.954284] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.958241] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1331.958494] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1331.959677] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfe36660-386d-41dc-8eca-0c4de2a57aad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.964957] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773574, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1331.967785] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1331.967785] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd259b-956c-b78b-25b4-37aeba13f5e7" [ 1331.967785] env[63371]: _type = "Task" [ 1331.967785] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.977339] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd259b-956c-b78b-25b4-37aeba13f5e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.020438] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773572, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096234} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.020700] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1332.021519] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e5f86b-dbbb-483c-bd69-ce9c89c33560 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.049362] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] 201a2d1e-9e2c-4c07-92be-200408874ad4/201a2d1e-9e2c-4c07-92be-200408874ad4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1332.049714] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ceac9cc-37c2-4926-bf4b-24775e9e4ba7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.070510] env[63371]: DEBUG nova.scheduler.client.report [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1332.075131] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1332.075131] env[63371]: value = "task-1773575" [ 1332.075131] env[63371]: _type = "Task" [ 1332.075131] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.090156] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773575, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.096740] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1332.125739] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:31:05Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='318065608',id=19,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-2139020529',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1332.126022] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1332.126022] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1332.126198] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1332.126291] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1332.126435] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1332.126646] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1332.126823] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 
tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1332.126991] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1332.127178] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1332.127377] env[63371]: DEBUG nova.virt.hardware [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1332.128300] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fef783c-d7d6-4cf8-9fcd-67e0cd6a6cef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.137817] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5299f083-fc71-4430-9621-5f66019145b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.238967] env[63371]: DEBUG nova.network.neutron [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updating instance_info_cache with network_info: [{"id": "f65a228f-d220-4478-a274-65cee7a3df3c", "address": "fa:16:3e:60:b5:56", "network": {"id": "c7fb5c0c-158e-4552-8360-a944e8eff32d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-951711523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f0a4db7d709461ca32a5dc0ebabdf31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf65a228f-d2", "ovs_interfaceid": "f65a228f-d220-4478-a274-65cee7a3df3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.308715] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 
tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773573, 'name': PowerOffVM_Task, 'duration_secs': 0.387156} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.309404] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1332.309661] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1332.309930] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e7b52e8-b70e-4f1c-9b33-353bc87b306b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.331429] env[63371]: INFO nova.compute.manager [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Took 28.63 seconds to build instance. [ 1332.421532] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1332.421816] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1332.422047] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleting the datastore file [datastore1] fc0715a1-a056-4a1b-a86e-959680effc97 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1332.422358] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44c3755c-9b0d-4fec-adec-42c7fd6ede0b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.436616] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1332.436616] env[63371]: value = "task-1773577" [ 1332.436616] env[63371]: _type = "Task" [ 1332.436616] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.446927] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773577, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.465538] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773574, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.482209] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd259b-956c-b78b-25b4-37aeba13f5e7, 'name': SearchDatastore_Task, 'duration_secs': 0.04533} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.482933] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbbfe4f4-2a6b-43c5-890e-5aab1a3e7613 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.489703] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1332.489703] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d52f33-dd0d-e7cf-6bb6-d6b18a64a03a" [ 1332.489703] env[63371]: _type = "Task" [ 1332.489703] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.503777] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d52f33-dd0d-e7cf-6bb6-d6b18a64a03a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.545988] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Updated VIF entry in instance network info cache for port d1b325d0-b864-44be-8fe4-b923489752d0. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1332.545988] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Updating instance_info_cache with network_info: [{"id": "d1b325d0-b864-44be-8fe4-b923489752d0", "address": "fa:16:3e:ff:96:d6", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.150", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1b325d0-b8", "ovs_interfaceid": "d1b325d0-b864-44be-8fe4-b923489752d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1332.582211] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.582884] env[63371]: DEBUG nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1332.589125] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.803s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1332.591067] env[63371]: DEBUG nova.objects.instance [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lazy-loading 'resources' on Instance uuid cffe6a79-ad7e-4488-b179-608a03c978aa {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1332.617253] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773575, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.742708] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Releasing lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1332.743041] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Instance network_info: |[{"id": "f65a228f-d220-4478-a274-65cee7a3df3c", "address": "fa:16:3e:60:b5:56", "network": {"id": "c7fb5c0c-158e-4552-8360-a944e8eff32d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-951711523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f0a4db7d709461ca32a5dc0ebabdf31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf65a228f-d2", "ovs_interfaceid": "f65a228f-d220-4478-a274-65cee7a3df3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1332.744674] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:b5:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69744f59-ecac-4b0b-831e-82a274d7acbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f65a228f-d220-4478-a274-65cee7a3df3c', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1332.754192] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Creating folder: Project (1f0a4db7d709461ca32a5dc0ebabdf31). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1332.754533] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-51376af7-09e0-44d8-88cb-20dd6450f890 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.770095] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Created folder: Project (1f0a4db7d709461ca32a5dc0ebabdf31) in parent group-v368199. [ 1332.770095] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Creating folder: Instances. Parent ref: group-v368238. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1332.771475] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-93c664fa-4e31-41b9-811d-5835f3049e55 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.779479] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Created folder: Instances in parent group-v368238. [ 1332.779728] env[63371]: DEBUG oslo.service.loopingcall [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1332.780070] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1332.780187] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-adc8093c-50df-4075-aeb0-41628e373a92 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.806130] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1332.806130] env[63371]: value = "task-1773580" [ 1332.806130] env[63371]: _type = "Task" [ 1332.806130] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.819544] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773580, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.836497] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37c549b4-c581-4977-88d3-33679b1e3d49 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.141s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1332.946363] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773577, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.976321] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773574, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.79691} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.976791] env[63371]: DEBUG nova.network.neutron [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Successfully updated port: 1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1332.978096] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/33cf00ea-3195-41cf-9b7a-a8e64496a122.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1332.978262] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1332.979742] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ac8ebbe3-ea31-400f-8ca1-c3a757f02c34 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.986395] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1332.986395] env[63371]: value = "task-1773581" [ 1332.986395] env[63371]: _type = "Task" [ 1332.986395] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.004554] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773581, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.011216] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d52f33-dd0d-e7cf-6bb6-d6b18a64a03a, 'name': SearchDatastore_Task, 'duration_secs': 0.068536} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.011498] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1333.011778] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d/4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1333.012327] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09ccac86-c750-4b25-b23b-ab4237896b78 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.020747] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1333.020747] env[63371]: value = "task-1773582" [ 1333.020747] env[63371]: _type = "Task" [ 1333.020747] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.033179] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773582, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.050493] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Releasing lock "refresh_cache-201a2d1e-9e2c-4c07-92be-200408874ad4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1333.050493] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Received event network-vif-plugged-17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1333.050493] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquiring lock "33cf00ea-3195-41cf-9b7a-a8e64496a122-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.050778] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.050814] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.051033] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] No waiting events found dispatching network-vif-plugged-17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1333.051147] env[63371]: WARNING nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Received unexpected event network-vif-plugged-17aee217-e9ac-4d12-8821-73130231a498 for instance with vm_state building and task_state spawning. 
[ 1333.051326] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Received event network-vif-deleted-8905eb18-7130-4195-b35c-38e03dd31b91 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1333.051514] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Received event network-changed-17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1333.051771] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Refreshing instance network info cache due to event network-changed-17aee217-e9ac-4d12-8821-73130231a498. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1333.051894] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquiring lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1333.051973] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquired lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.052519] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Refreshing network info cache for port 17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1333.090747] env[63371]: DEBUG nova.compute.utils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1333.096811] env[63371]: DEBUG nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Not allocating networking since 'none' was specified. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1333.103716] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773575, 'name': ReconfigVM_Task, 'duration_secs': 0.658377} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.103932] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Reconfigured VM instance instance-0000000a to attach disk [datastore1] 201a2d1e-9e2c-4c07-92be-200408874ad4/201a2d1e-9e2c-4c07-92be-200408874ad4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1333.104678] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97579428-e793-46a3-a036-6fba503a071b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.115459] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1333.115459] env[63371]: value = "task-1773583" [ 1333.115459] env[63371]: _type = "Task" [ 1333.115459] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.128687] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773583, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.307827] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "d9523239-79d1-434f-977a-e1f0e358c82b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.308109] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "d9523239-79d1-434f-977a-e1f0e358c82b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.308330] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "d9523239-79d1-434f-977a-e1f0e358c82b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1333.308917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "d9523239-79d1-434f-977a-e1f0e358c82b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1333.308917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "d9523239-79d1-434f-977a-e1f0e358c82b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1333.312962] env[63371]: INFO nova.compute.manager [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Terminating instance [ 1333.321096] env[63371]: DEBUG nova.compute.manager [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1333.321424] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1333.325907] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21a4467-aec0-425e-a4d2-c6c817205dcd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.337019] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773580, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.338849] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1333.341560] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1333.342032] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-618b4f58-c162-463f-a8de-ebd6a15e04a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.361031] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1333.361031] env[63371]: value = "task-1773584" [ 1333.361031] env[63371]: _type = "Task" [ 1333.361031] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.377645] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773584, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.449863] env[63371]: DEBUG oslo_vmware.api [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773577, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.541654} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.452699] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1333.452922] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1333.453150] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1333.453364] env[63371]: INFO nova.compute.manager [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1333.453852] env[63371]: DEBUG oslo.service.loopingcall [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1333.453998] env[63371]: DEBUG nova.compute.manager [-] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1333.454108] env[63371]: DEBUG nova.network.neutron [-] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1333.484010] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1333.484010] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.484010] env[63371]: DEBUG nova.network.neutron [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1333.510586] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773581, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.154189} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.515252] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1333.518678] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7270b58-1f20-4b15-abf3-b75dc1d0bb98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.543373] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/33cf00ea-3195-41cf-9b7a-a8e64496a122.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1333.545858] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c362ad2-69f6-48b9-bac2-bcd36d197734 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.565213] env[63371]: DEBUG nova.compute.manager [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Received event network-changed-358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1333.565213] env[63371]: DEBUG nova.compute.manager [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Refreshing instance network info cache due to event network-changed-358a8d7d-459f-49a9-b3c7-0cf811dd7e54. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1333.565506] env[63371]: DEBUG oslo_concurrency.lockutils [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] Acquiring lock "refresh_cache-4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1333.565660] env[63371]: DEBUG oslo_concurrency.lockutils [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] Acquired lock "refresh_cache-4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1333.565915] env[63371]: DEBUG nova.network.neutron [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Refreshing network info cache for port 358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1333.578697] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773582, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.589226] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1333.589226] env[63371]: value = "task-1773585" [ 1333.589226] env[63371]: _type = "Task" [ 1333.589226] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.600418] env[63371]: DEBUG nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1333.609517] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773585, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.634500] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773583, 'name': Rename_Task, 'duration_secs': 0.407442} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1333.635016] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1333.635212] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b474de6-5783-4103-b61b-10f0c7b6f989 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.644878] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1333.644878] env[63371]: value = "task-1773586" [ 1333.644878] env[63371]: _type = "Task" [ 1333.644878] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1333.659464] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773586, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.684880] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02993a8f-1a75-4fc5-b6c7-13ac3cd5de3c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.705025] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25838425-092e-469b-81f5-346ba8b49089 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.743919] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e39ef8-26c4-4e46-9287-0f61b44fee4b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.753634] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef12de85-e17d-4e31-8fa5-e3732cfa6fb9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.773565] env[63371]: DEBUG nova.compute.provider_tree [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1333.823475] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773580, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.873953] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773584, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1333.876417] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.033993] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773582, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.88023} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.034329] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d/4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1334.034643] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1334.035162] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eea36f8a-a4f6-4efd-9639-edcfaf892111 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.045102] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1334.045102] env[63371]: value = "task-1773587" [ 1334.045102] env[63371]: _type = "Task" [ 1334.045102] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.055792] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773587, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.087454] env[63371]: DEBUG nova.network.neutron [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1334.105605] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "76c861a7-30f2-40f4-b723-7912975f36f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1334.106095] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "76c861a7-30f2-40f4-b723-7912975f36f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.106499] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773585, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.117510] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Updated VIF entry in instance network info cache for port 17aee217-e9ac-4d12-8821-73130231a498. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1334.118274] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Updating instance_info_cache with network_info: [{"id": "17aee217-e9ac-4d12-8821-73130231a498", "address": "fa:16:3e:02:c3:2a", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17aee217-e9", "ovs_interfaceid": "17aee217-e9ac-4d12-8821-73130231a498", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.161178] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773586, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.283299] env[63371]: DEBUG nova.scheduler.client.report [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1334.324153] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773580, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.372665] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773584, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.562276] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773587, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.358388} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.562594] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1334.564081] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47bba49b-582c-4f4c-9b08-20bfd2e541a1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.599310] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d/4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1334.600961] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22681644-cce8-40c4-9c71-5bdce0c1dbf2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.623022] env[63371]: DEBUG nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] 
[instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1334.625994] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Releasing lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1334.631039] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Received event network-changed-85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1334.631039] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Refreshing instance network info cache due to event network-changed-85189d02-f613-4d29-a47a-b7c1ce74c9f3. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1334.631039] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquiring lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.631039] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Acquired lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.631039] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Refreshing network info cache for port 85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1334.638371] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773585, 'name': ReconfigVM_Task, 'duration_secs': 0.768248} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.641209] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/33cf00ea-3195-41cf-9b7a-a8e64496a122.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1334.642621] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1334.642621] env[63371]: value = "task-1773588" [ 1334.642621] env[63371]: _type = "Task" [ 1334.642621] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.642796] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55145a34-b7a2-4154-9344-696e3ef3c4e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.670809] env[63371]: DEBUG oslo_vmware.api [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773586, 'name': PowerOnVM_Task, 'duration_secs': 1.018626} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.671108] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773588, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.673685] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1334.674236] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1334.674236] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1334.674236] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1334.674383] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1334.674523] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1334.674729] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1334.674883] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1334.675058] env[63371]: DEBUG nova.virt.hardware [None 
req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1334.675247] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1334.675419] env[63371]: DEBUG nova.virt.hardware [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1334.676867] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1334.677131] env[63371]: INFO nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Took 12.70 seconds to spawn the instance on the hypervisor. [ 1334.677321] env[63371]: DEBUG nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1334.678355] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ddf95c-797c-4a03-af2b-df73b8a1534d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.682826] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1334.682826] env[63371]: value = "task-1773589" [ 1334.682826] env[63371]: _type = "Task" [ 1334.682826] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.684052] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b37bb00-ab77-4dd7-b4a4-a7cea52fc37b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.696252] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a887f1f-c90f-4aaf-bb64-766ad7a62d73 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.709052] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773589, 'name': Rename_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.727130] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1334.738615] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Creating folder: Project (8cce5d6d9b0e401caf02074dc66c16a6). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1334.740219] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8737b8a5-3cc1-478d-80a3-40be5b154d49 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.753770] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Created folder: Project (8cce5d6d9b0e401caf02074dc66c16a6) in parent group-v368199. [ 1334.753979] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Creating folder: Instances. Parent ref: group-v368241. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1334.754233] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bf5e9975-1802-4190-9c2b-315150f04c05 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.773624] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Created folder: Instances in parent group-v368241. [ 1334.773869] env[63371]: DEBUG oslo.service.loopingcall [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1334.774258] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1334.774362] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47adeb32-602b-4c34-99dd-ea358ad09286 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.789344] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.200s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1334.793237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.656s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1334.793919] env[63371]: INFO nova.compute.claims [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1334.798423] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1334.798423] env[63371]: value = "task-1773592" [ 1334.798423] env[63371]: _type = "Task" [ 1334.798423] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.815302] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773592, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.821519] env[63371]: DEBUG nova.network.neutron [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Updating instance_info_cache with network_info: [{"id": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "address": "fa:16:3e:81:c5:8b", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1846a8cd-46", "ovs_interfaceid": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1334.826022] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773580, 'name': CreateVM_Task, 'duration_secs': 2.004616} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.826487] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1334.827255] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1334.827446] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1334.828040] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1334.828040] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d069a988-6a22-4e0e-8bbe-e2af6ff8cda9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.830841] env[63371]: INFO nova.scheduler.client.report [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Deleted allocations for instance cffe6a79-ad7e-4488-b179-608a03c978aa [ 1334.839289] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1334.839289] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5230558e-3e67-7fd5-6185-85de0bf48239" [ 1334.839289] env[63371]: _type = "Task" [ 1334.839289] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.848673] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5230558e-3e67-7fd5-6185-85de0bf48239, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1334.872564] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773584, 'name': PowerOffVM_Task, 'duration_secs': 1.208953} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1334.872831] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1334.872994] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1334.873253] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22984b90-12f7-4331-9ef8-874aeee625ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.972014] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1334.972014] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1334.972160] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleting the datastore file [datastore1] d9523239-79d1-434f-977a-e1f0e358c82b {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1334.972428] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d1fc5b2-60b5-4dd1-be64-ca831c58ebd8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.981542] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1334.981542] env[63371]: value = "task-1773594" [ 1334.981542] env[63371]: _type = "Task" [ 1334.981542] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1334.990825] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773594, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.011091] env[63371]: DEBUG nova.network.neutron [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Updated VIF entry in instance network info cache for port 358a8d7d-459f-49a9-b3c7-0cf811dd7e54. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1335.011476] env[63371]: DEBUG nova.network.neutron [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Updating instance_info_cache with network_info: [{"id": "358a8d7d-459f-49a9-b3c7-0cf811dd7e54", "address": "fa:16:3e:88:50:21", "network": {"id": "36f3a290-9a15-4a89-ad59-f55babc49d13", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-72446873-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7a9101ae72864e0b8af6c598153ff40e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap358a8d7d-45", "ovs_interfaceid": "358a8d7d-459f-49a9-b3c7-0cf811dd7e54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.161478] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773588, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.220545] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773589, 'name': Rename_Task, 'duration_secs': 0.267444} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.220810] env[63371]: DEBUG nova.network.neutron [-] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.221793] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1335.222169] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a706364-6c05-47d0-b231-7141bf00a9fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.227706] env[63371]: INFO nova.compute.manager [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Took 30.00 seconds to build instance. [ 1335.236070] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1335.236070] env[63371]: value = "task-1773595" [ 1335.236070] env[63371]: _type = "Task" [ 1335.236070] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.248103] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773595, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.318482] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773592, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.327703] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Releasing lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.328177] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Instance network_info: |[{"id": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "address": "fa:16:3e:81:c5:8b", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1846a8cd-46", "ovs_interfaceid": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1335.328466] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:c5:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1846a8cd-46dc-4187-af60-d4e4eee750dc', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1335.340199] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Creating folder: Project (b5c5bf80b8e64c8795da4d79d6a89150). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1335.343616] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28567f13-d83a-456e-b022-cc9eacf692d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.347065] env[63371]: DEBUG oslo_concurrency.lockutils [None req-57b8bba2-b061-432f-957a-0674e1ab6ba8 tempest-DeleteServersAdminTestJSON-1470194405 tempest-DeleteServersAdminTestJSON-1470194405-project-admin] Lock "cffe6a79-ad7e-4488-b179-608a03c978aa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.699s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.361325] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5230558e-3e67-7fd5-6185-85de0bf48239, 'name': SearchDatastore_Task, 'duration_secs': 0.030935} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.361648] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.361888] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1335.362149] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.362312] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.362495] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1335.363196] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-973d886e-d03f-45cb-a3cd-200a19b244c0 
{{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.367421] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Created folder: Project (b5c5bf80b8e64c8795da4d79d6a89150) in parent group-v368199. [ 1335.367664] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Creating folder: Instances. Parent ref: group-v368244. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1335.368333] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44b3d297-e3f4-4455-ae97-e75b5e1a04b6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.383934] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Created folder: Instances in parent group-v368244. [ 1335.384163] env[63371]: DEBUG oslo.service.loopingcall [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1335.384361] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1335.384571] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4241be9-61e2-4db8-94d1-84194f1f11e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.404085] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1335.404292] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1335.405457] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14dce815-e752-45b3-9e4a-c33627332131 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.411846] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1335.411846] env[63371]: value = "task-1773598" [ 1335.411846] env[63371]: _type = "Task" [ 1335.411846] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.416051] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1335.416051] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52800966-d064-5e7c-e6f5-8a4028cae0c1" [ 1335.416051] env[63371]: _type = "Task" [ 1335.416051] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.422755] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773598, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.428568] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52800966-d064-5e7c-e6f5-8a4028cae0c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.497723] env[63371]: DEBUG oslo_vmware.api [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773594, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.403639} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.497987] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1335.498218] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1335.498428] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1335.498602] env[63371]: INFO nova.compute.manager [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Took 2.18 seconds to destroy the instance on the hypervisor. [ 1335.498841] env[63371]: DEBUG oslo.service.loopingcall [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1335.499181] env[63371]: DEBUG nova.compute.manager [-] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1335.499296] env[63371]: DEBUG nova.network.neutron [-] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1335.514514] env[63371]: DEBUG oslo_concurrency.lockutils [req-e1fad9aa-5853-41e1-9fe7-9c995af17d50 req-85955416-13c8-40ca-8747-37ab12f7b03f service nova] Releasing lock "refresh_cache-4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1335.657980] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updated VIF entry in instance network info cache for port 85189d02-f613-4d29-a47a-b7c1ce74c9f3. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1335.658389] env[63371]: DEBUG nova.network.neutron [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updating instance_info_cache with network_info: [{"id": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "address": "fa:16:3e:52:9a:b1", "network": {"id": "c7291076-10b1-479b-a360-a5d60b016548", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-373506764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.158", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7e27f48936d4019bd23bc30cd94f85b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85189d02-f6", "ovs_interfaceid": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1335.662883] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773588, 'name': ReconfigVM_Task, 'duration_secs': 0.659622} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.665744] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d/4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1335.666389] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8510e9ad-bcdd-4a67-b139-bf631608bfad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.675634] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1335.675634] env[63371]: value = "task-1773599" [ 1335.675634] env[63371]: _type = "Task" [ 1335.675634] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.690785] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773599, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.725881] env[63371]: INFO nova.compute.manager [-] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Took 2.27 seconds to deallocate network for instance. [ 1335.736662] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7f0c529a-6909-44ab-8ce0-75f8555096de tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.514s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.750060] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773595, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.778567] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Received event network-vif-plugged-f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1335.778912] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1335.779266] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1335.779445] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1335.779655] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] No waiting events found dispatching network-vif-plugged-f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1335.779775] env[63371]: WARNING nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Received unexpected event network-vif-plugged-f65a228f-d220-4478-a274-65cee7a3df3c for instance with vm_state building and task_state spawning. [ 1335.779930] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Received event network-changed-f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1335.780090] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Refreshing instance network info cache due to event network-changed-f65a228f-d220-4478-a274-65cee7a3df3c. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1335.780269] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquiring lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.780395] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquired lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.780541] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Refreshing network info cache for port f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1335.820588] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773592, 'name': CreateVM_Task, 'duration_secs': 0.593337} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.820934] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1335.821660] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.821990] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1335.822554] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1335.823000] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9e2f86e-d4a2-4923-a34c-607f18fc47f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.834347] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1335.834347] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5251bf1f-595d-405e-181b-18fb6b964e5d" [ 1335.834347] env[63371]: _type = "Task" [ 1335.834347] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.847176] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5251bf1f-595d-405e-181b-18fb6b964e5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.930469] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773598, 'name': CreateVM_Task, 'duration_secs': 0.498932} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.934075] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1335.934537] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52800966-d064-5e7c-e6f5-8a4028cae0c1, 'name': SearchDatastore_Task, 'duration_secs': 0.017079} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1335.935238] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1335.936064] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-325c4391-24a0-4b87-943e-d30747f2910b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1335.945676] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1335.945676] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528d1add-3003-da03-e4c8-b9679f2074c4" [ 1335.945676] env[63371]: _type = "Task" [ 1335.945676] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1335.956840] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528d1add-3003-da03-e4c8-b9679f2074c4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1335.968034] env[63371]: DEBUG nova.compute.manager [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Stashing vm_state: active {{(pid=63371) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1336.166249] env[63371]: DEBUG oslo_concurrency.lockutils [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] Releasing lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.166673] env[63371]: DEBUG nova.compute.manager [req-baf4f522-e6ae-4488-81f4-c538145a84e8 req-b4b808b0-e5f3-4aa1-8a83-fbf7d0b2a4f7 service nova] [instance: ca202079-2eae-441e-80f6-e403497e137d] Received event network-vif-deleted-eb0a9632-9bb3-4855-8ad5-af6c7a628900 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1336.187829] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773599, 'name': Rename_Task, 'duration_secs': 0.207209} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.187829] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1336.187977] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c1361f27-2815-4470-a19f-2ea49bc437f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.195765] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1336.195765] env[63371]: value = "task-1773600" [ 1336.195765] env[63371]: _type = "Task" [ 1336.195765] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.204823] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773600, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.235992] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.236403] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1336.259706] env[63371]: DEBUG oslo_vmware.api [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773595, 'name': PowerOnVM_Task, 'duration_secs': 0.704523} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.260445] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1336.261581] env[63371]: INFO nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Took 11.76 seconds to spawn the instance on the hypervisor. 
[ 1336.261581] env[63371]: DEBUG nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1336.262745] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493ca6bd-1418-43f6-825a-17786e9a990b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.280155] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5566dace-d588-4d81-84ef-b89b2a60886f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.293251] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d95fabb-134c-416a-b297-c81b6603b8a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.335033] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35a0808-84a8-4887-ac46-98dd0bb72511 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.353928] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5251bf1f-595d-405e-181b-18fb6b964e5d, 'name': SearchDatastore_Task, 'duration_secs': 0.018081} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.354216] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquiring lock "e00c2e45-b8bc-440b-8b58-a21f127192c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.354395] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1336.355721] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe2edf8f-e601-483d-b776-f36e9d850cd7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.360646] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.360646] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1336.360646] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1336.361369] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.361727] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1336.362337] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-85804f55-911c-4034-8ac6-d19eb77ef5ff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.374644] env[63371]: DEBUG nova.compute.provider_tree [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1336.380018] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1336.380018] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c4b4e9-167e-4be9-3606-f45178e8a16f" [ 1336.380018] env[63371]: _type = "Task" [ 1336.380018] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.393641] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c4b4e9-167e-4be9-3606-f45178e8a16f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.458603] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528d1add-3003-da03-e4c8-b9679f2074c4, 'name': SearchDatastore_Task, 'duration_secs': 0.021962} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1336.458958] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1336.459165] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cbcdfe1a-86a4-4a12-99b5-44d291d41769/cbcdfe1a-86a4-4a12-99b5-44d291d41769.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1336.459482] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1336.459672] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1336.459904] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dae5235b-38cf-4532-894b-190b24dea388 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.461947] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1265596e-b533-4edf-a0ae-396ff0d33f76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.474452] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1336.474452] env[63371]: value = "task-1773601" [ 1336.474452] env[63371]: _type = "Task" [ 1336.474452] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.478794] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1336.479085] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1336.480113] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e47905c-038c-48a9-9b50-c2337ccbc80d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1336.485709] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773601, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.486929] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.488953] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1336.488953] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5288760d-bc0a-2c32-5814-0c7fb72b45ae" [ 1336.488953] env[63371]: _type = "Task" [ 1336.488953] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1336.498509] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5288760d-bc0a-2c32-5814-0c7fb72b45ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.716497] env[63371]: DEBUG nova.network.neutron [-] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.721092] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773600, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1336.776271] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1336.802577] env[63371]: INFO nova.compute.manager [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Took 18.91 seconds to build instance. 
[ 1336.847132] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updated VIF entry in instance network info cache for port f65a228f-d220-4478-a274-65cee7a3df3c. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1336.848123] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updating instance_info_cache with network_info: [{"id": "f65a228f-d220-4478-a274-65cee7a3df3c", "address": "fa:16:3e:60:b5:56", "network": {"id": "c7fb5c0c-158e-4552-8360-a944e8eff32d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-951711523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f0a4db7d709461ca32a5dc0ebabdf31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf65a228f-d2", "ovs_interfaceid": "f65a228f-d220-4478-a274-65cee7a3df3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1336.882968] env[63371]: DEBUG nova.scheduler.client.report [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1337.558671] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.767s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.563354] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1337.565871] env[63371]: INFO nova.compute.manager [-] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Took 2.07 seconds to deallocate network for instance. [ 1337.566365] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf99720-ab89-40f6-a2e3-a623bc09d8ff tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.196s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1337.566673] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Releasing lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1337.566909] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Received event network-changed-9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1337.567139] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Refreshing instance network info cache due to event network-changed-9a4b63df-9697-47a1-81ad-c69476a80975. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1337.567383] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquiring lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.567524] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquired lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.570450] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Refreshing network info cache for port 9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1337.572496] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.156s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.572496] env[63371]: DEBUG nova.objects.instance [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lazy-loading 'resources' on Instance uuid 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094 {{(pid=63371) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1337.589786] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1337.589786] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1337.597749] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c4b4e9-167e-4be9-3606-f45178e8a16f, 'name': SearchDatastore_Task, 'duration_secs': 0.02656} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.607879] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1337.608178] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1337.608426] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.609069] env[63371]: DEBUG oslo_vmware.api [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773600, 'name': PowerOnVM_Task, 'duration_secs': 1.047591} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.609753] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5288760d-bc0a-2c32-5814-0c7fb72b45ae, 'name': SearchDatastore_Task, 'duration_secs': 0.017294} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.609973] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773601, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.690956} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.610275] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1337.610508] env[63371]: INFO nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Took 10.54 seconds to spawn the instance on the hypervisor. [ 1337.610712] env[63371]: DEBUG nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1337.611823] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cbcdfe1a-86a4-4a12-99b5-44d291d41769/cbcdfe1a-86a4-4a12-99b5-44d291d41769.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1337.611963] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1337.612952] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8de5e0-9ac6-42c2-8a8a-eeda75162502 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.616262] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b5971a6-ac88-4eb7-8fb2-820543bb2863 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1337.618457] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7bc5eae-bc5b-46bc-9ce7-a0a3f9775fe5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.632096] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1337.632096] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]527e0f0d-ce3c-1447-19c0-4ab34b789e73" [ 1337.632096] env[63371]: _type = "Task" [ 1337.632096] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.632324] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1337.632324] env[63371]: value = "task-1773602" [ 1337.632324] env[63371]: _type = "Task" [ 1337.632324] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.645851] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773602, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.650715] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527e0f0d-ce3c-1447-19c0-4ab34b789e73, 'name': SearchDatastore_Task, 'duration_secs': 0.01632} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1337.650947] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1337.651114] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1/cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1337.651398] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.651666] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1337.651931] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b3c1869-075a-4b67-9b6f-9de0cb2ac936 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.654765] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9d45fb70-8aa3-466c-98db-2326d14a7a95 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.665375] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1337.665375] env[63371]: value = "task-1773603" [ 1337.665375] env[63371]: _type = "Task" [ 1337.665375] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.666914] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1337.667044] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1337.673165] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28a5aed9-36a6-48e9-afbf-b111f99535ff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1337.682208] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773603, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.682498] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1337.682498] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523ab898-43c9-e7ea-843e-dcca47fcf842" [ 1337.682498] env[63371]: _type = "Task" [ 1337.682498] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1337.695728] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523ab898-43c9-e7ea-843e-dcca47fcf842, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1337.708789] env[63371]: DEBUG nova.compute.manager [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Received event network-changed-a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1337.708789] env[63371]: DEBUG nova.compute.manager [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Refreshing instance network info cache due to event network-changed-a2807b8c-5895-474a-9c75-58bd21982409. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1337.708789] env[63371]: DEBUG oslo_concurrency.lockutils [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] Acquiring lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1337.708789] env[63371]: DEBUG oslo_concurrency.lockutils [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] Acquired lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1337.708789] env[63371]: DEBUG nova.network.neutron [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Refreshing network info cache for port a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1338.079357] env[63371]: DEBUG nova.compute.utils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1338.081371] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1338.081627] env[63371]: DEBUG nova.network.neutron [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1338.091538] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1338.095671] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.156732] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773602, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103003} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.157335] env[63371]: INFO nova.compute.manager [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Took 18.09 seconds to build instance. [ 1338.160942] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1338.162942] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbcc126-1c12-468b-9504-6c7adea8a311 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.192081] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] cbcdfe1a-86a4-4a12-99b5-44d291d41769/cbcdfe1a-86a4-4a12-99b5-44d291d41769.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1338.198112] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e414c804-0ca3-46ed-9734-ac2008fbb688 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.223541] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773603, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.230572] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523ab898-43c9-e7ea-843e-dcca47fcf842, 'name': SearchDatastore_Task, 'duration_secs': 0.015052} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.233602] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1338.233602] env[63371]: value = "task-1773604" [ 1338.233602] env[63371]: _type = "Task" [ 1338.233602] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.233602] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed4c73f9-3a5c-4b96-9d1b-4fcd57182d12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.253684] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1338.253684] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b038fc-8d1b-5a49-a8eb-2db9b829171d" [ 1338.253684] env[63371]: _type = "Task" [ 1338.253684] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.253684] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773604, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.269220] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b038fc-8d1b-5a49-a8eb-2db9b829171d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.356452] env[63371]: DEBUG nova.policy [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42d2f2710cc949ad9ffb24b9474bd8b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '014cf08938b14b169e45f01c87f33d23', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1338.586036] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1338.622162] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1338.663813] env[63371]: DEBUG oslo_concurrency.lockutils [None req-97f10ad8-e76f-4309-a1c9-9c797efb0eb9 tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.605s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1338.686157] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773603, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.958554} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.687985] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1/cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1338.687985] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1338.695736] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b72a80e0-5e32-4ed2-bfa6-337e2f71bcb9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.704532] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1338.704532] env[63371]: value = "task-1773605" [ 1338.704532] env[63371]: _type = "Task" [ 1338.704532] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.720975] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773605, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.732814] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94c4fc9f-1095-48d6-b3d3-95054e81b370 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.762501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15df00ea-1b51-46d9-9895-ab74db7e4e18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.766193] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773604, 'name': ReconfigVM_Task, 'duration_secs': 0.509226} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.766483] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Reconfigured VM instance instance-0000000d to attach disk [datastore1] cbcdfe1a-86a4-4a12-99b5-44d291d41769/cbcdfe1a-86a4-4a12-99b5-44d291d41769.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1338.768294] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6fba06b-f62b-4306-9ce5-3ad395fae0d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.773545] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b038fc-8d1b-5a49-a8eb-2db9b829171d, 'name': SearchDatastore_Task, 'duration_secs': 0.075868} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1338.800257] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1338.800902] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cfbd0c7c-243e-497a-acb1-ab9323c23574/cfbd0c7c-243e-497a-acb1-ab9323c23574.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1338.802068] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1338.802068] env[63371]: value = "task-1773606" [ 1338.802068] env[63371]: _type = "Task" [ 1338.802068] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.802479] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12a164b0-709d-4eb1-aaac-1c5ad7f4de2c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.805404] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad9ac44a-75f5-4fe1-92de-efed7c68992b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.821449] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a282e5c-73ce-4b93-8f0a-3296da193096 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.826033] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1338.826033] env[63371]: value = "task-1773607" [ 1338.826033] env[63371]: _type = "Task" [ 1338.826033] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1338.826414] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773606, 'name': Rename_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1338.840545] env[63371]: DEBUG nova.compute.provider_tree [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1338.848168] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773607, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.003394] env[63371]: DEBUG nova.compute.manager [req-ea0f4026-3a5c-4ae4-8ac7-48d1a69be6a8 req-73fde250-cef4-4221-8704-d173aa887840 service nova] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Received event network-vif-deleted-969cd918-b804-4635-a828-8235c720e31b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1339.003609] env[63371]: DEBUG nova.compute.manager [req-ea0f4026-3a5c-4ae4-8ac7-48d1a69be6a8 req-73fde250-cef4-4221-8704-d173aa887840 service nova] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Received event network-vif-deleted-6c410064-2e43-498a-bc47-de2e9ed224f0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1339.167943] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1339.202812] env[63371]: INFO nova.compute.manager [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Rescuing [ 1339.203323] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1339.203526] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1339.203695] env[63371]: DEBUG nova.network.neutron [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1339.219546] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773605, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131139} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.220203] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1339.222133] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d08bbcf-08e6-4ca2-90eb-4e87e24728b9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.248284] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1/cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1339.248982] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c21f0bc9-e003-422b-bf22-ff6a239edd6c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.276734] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1339.276734] env[63371]: value = "task-1773608" [ 
1339.276734] env[63371]: _type = "Task" [ 1339.276734] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.297106] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773608, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.316237] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773606, 'name': Rename_Task, 'duration_secs': 0.16666} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.316879] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1339.316879] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2fd7f6de-af51-4051-b01f-71612db059e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.326881] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1339.326881] env[63371]: value = "task-1773609" [ 1339.326881] env[63371]: _type = "Task" [ 1339.326881] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.340368] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773609, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.344714] env[63371]: DEBUG nova.scheduler.client.report [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1339.348182] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773607, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.601539] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1339.640804] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1339.640804] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1339.641279] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1339.642081] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1339.642391] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1339.644805] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1339.644805] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1339.644805] 
env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1339.644805] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1339.644805] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1339.645357] env[63371]: DEBUG nova.virt.hardware [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1339.645357] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d374169-16e3-4800-bf82-e8eb41a99ce9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.656445] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff377230-892c-4ddb-b452-c8fe4d5c69cf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.681190] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updated VIF entry in instance network info cache for port 9a4b63df-9697-47a1-81ad-c69476a80975. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1339.681190] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updating instance_info_cache with network_info: [{"id": "9a4b63df-9697-47a1-81ad-c69476a80975", "address": "fa:16:3e:ca:f3:37", "network": {"id": "5b9593c6-3e8e-4b0f-ad69-daf3e2419d2c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-90261722-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6e7f96aff7d240469616d256291f7081", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11032cc2-b275-48d2-9c40-9455ea7d49e3", "external-id": "nsx-vlan-transportzone-226", "segmentation_id": 226, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a4b63df-96", "ovs_interfaceid": "9a4b63df-9697-47a1-81ad-c69476a80975", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.712317] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1339.789415] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773608, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.808147] env[63371]: DEBUG nova.network.neutron [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updated VIF entry in instance network info cache for port a2807b8c-5895-474a-9c75-58bd21982409. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1339.809109] env[63371]: DEBUG nova.network.neutron [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updating instance_info_cache with network_info: [{"id": "a2807b8c-5895-474a-9c75-58bd21982409", "address": "fa:16:3e:c2:68:79", "network": {"id": "6b2f7559-22c6-4657-b126-18f7ace337d5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1011247410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c99d37d52edb40f99efb471da50f5845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2807b8c-58", "ovs_interfaceid": "a2807b8c-5895-474a-9c75-58bd21982409", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1339.841779] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.77461} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1339.845067] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cfbd0c7c-243e-497a-acb1-ab9323c23574/cfbd0c7c-243e-497a-acb1-ab9323c23574.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1339.845296] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1339.845873] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773609, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.846098] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bd551962-da37-46bb-9354-bf1a49c5aeeb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.850718] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.279s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1339.853609] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.255s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1339.855993] env[63371]: INFO nova.compute.claims [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1339.859935] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1339.859935] env[63371]: value = "task-1773610" [ 1339.859935] env[63371]: _type = "Task" [ 1339.859935] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1339.870818] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773610, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1339.879058] env[63371]: INFO nova.scheduler.client.report [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Deleted allocations for instance 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094 [ 1340.188404] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Releasing lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.189121] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Received event network-vif-plugged-1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1340.189121] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquiring lock "cfbd0c7c-243e-497a-acb1-ab9323c23574-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.189121] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.189469] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.189469] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] No waiting events found dispatching network-vif-plugged-1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1340.192211] env[63371]: WARNING nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Received unexpected event network-vif-plugged-1846a8cd-46dc-4187-af60-d4e4eee750dc for instance with vm_state building and task_state spawning. 
[ 1340.192211] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Received event network-changed-1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1340.192211] env[63371]: DEBUG nova.compute.manager [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Refreshing instance network info cache due to event network-changed-1846a8cd-46dc-4187-af60-d4e4eee750dc. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1340.192211] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquiring lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1340.192211] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Acquired lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1340.193151] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Refreshing network info cache for port 1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1340.223541] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "852e14a7-2f9f-421c-9804-56c885885c7d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.223819] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "852e14a7-2f9f-421c-9804-56c885885c7d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.294617] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773608, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.313460] env[63371]: DEBUG nova.network.neutron [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Successfully created port: 54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1340.317416] env[63371]: DEBUG oslo_concurrency.lockutils [req-10b3e302-389a-414d-99eb-0e20db796b62 req-3a7967e3-f987-45d3-a1e2-571f9c99d7da service nova] Releasing lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1340.348053] env[63371]: DEBUG oslo_vmware.api [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773609, 'name': PowerOnVM_Task, 'duration_secs': 0.642935} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.348446] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1340.348673] env[63371]: INFO nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Took 10.83 seconds to spawn the instance on the hypervisor. [ 1340.348868] env[63371]: DEBUG nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1340.350836] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98d772a-882b-4086-8560-eaf8a897fd5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.377837] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773610, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.167357} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.378200] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1340.379126] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f398a5-af44-4d0e-8964-5a1a606c4348 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.408328] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] cfbd0c7c-243e-497a-acb1-ab9323c23574/cfbd0c7c-243e-497a-acb1-ab9323c23574.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1340.409970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2822ff22-6053-4caa-b8a5-4b083edfbdcd tempest-ServerDiagnosticsTest-536238137 tempest-ServerDiagnosticsTest-536238137-project-member] Lock "3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.823s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1340.411987] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acc11d21-1734-49f4-a1dd-05145822c7db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.439268] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1340.439268] env[63371]: value = "task-1773611" [ 1340.439268] env[63371]: _type = "Task" [ 1340.439268] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.450799] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773611, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.480079] env[63371]: DEBUG nova.network.neutron [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Updating instance_info_cache with network_info: [{"id": "17aee217-e9ac-4d12-8821-73130231a498", "address": "fa:16:3e:02:c3:2a", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17aee217-e9", "ovs_interfaceid": "17aee217-e9ac-4d12-8821-73130231a498", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1340.799732] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773608, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.878938] env[63371]: INFO nova.compute.manager [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Took 20.46 seconds to build instance. 
[ 1340.943467] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1340.943467] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1340.955686] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773611, 'name': ReconfigVM_Task, 'duration_secs': 0.466784} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1340.958778] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Reconfigured VM instance instance-0000000e to attach disk [datastore1] cfbd0c7c-243e-497a-acb1-ab9323c23574/cfbd0c7c-243e-497a-acb1-ab9323c23574.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1340.959759] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a79980e-d677-4369-bfa7-6032df649127 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1340.967855] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1340.967855] env[63371]: value = "task-1773612" [ 1340.967855] env[63371]: _type = "Task" [ 1340.967855] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1340.982795] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773612, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1340.983298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "refresh_cache-33cf00ea-3195-41cf-9b7a-a8e64496a122" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1341.297169] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773608, 'name': ReconfigVM_Task, 'duration_secs': 1.852063} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.297169] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Reconfigured VM instance instance-0000000f to attach disk [datastore1] cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1/cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1341.297169] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73cf9181-4349-4b2a-b66f-18674a232b81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.305575] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1341.305575] env[63371]: value = "task-1773613" [ 1341.305575] env[63371]: _type = "Task" [ 1341.305575] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.321310] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773613, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.352034] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b1a0ed-9697-4c7b-a793-f973ef8043b6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.360492] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1310c9-6700-4b19-9f93-e3d70302479a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.398050] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f9d79845-1ca0-4245-9db4-334081cdbf9b tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.641s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1341.400307] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b7da57-fbce-4627-849e-ad2b22f77013 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.409593] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9caf16f8-fe8b-4322-bf11-4e04f88a6f30 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.430019] env[63371]: DEBUG nova.compute.provider_tree [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1341.450231] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Updated VIF entry in instance network info cache for port 1846a8cd-46dc-4187-af60-d4e4eee750dc. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1341.450802] env[63371]: DEBUG nova.network.neutron [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Updating instance_info_cache with network_info: [{"id": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "address": "fa:16:3e:81:c5:8b", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1846a8cd-46", "ovs_interfaceid": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1341.479822] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773612, 'name': Rename_Task, 'duration_secs': 0.392224} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.480075] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1341.480343] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb1f1cec-d3ac-4be7-a8f3-d49912feecf9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.487802] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1341.487802] env[63371]: value = "task-1773614" [ 1341.487802] env[63371]: _type = "Task" [ 1341.487802] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.498666] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773614, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.527549] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1341.527863] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a6c91af-5c91-4b25-9007-9a8c220a0515 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.539726] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1341.539726] env[63371]: value = "task-1773615" [ 1341.539726] env[63371]: _type = "Task" [ 1341.539726] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.555079] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773615, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.627090] env[63371]: DEBUG nova.compute.manager [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Received event network-changed-85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1341.628116] env[63371]: DEBUG nova.compute.manager [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Refreshing instance network info cache due to event network-changed-85189d02-f613-4d29-a47a-b7c1ce74c9f3. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1341.628116] env[63371]: DEBUG oslo_concurrency.lockutils [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] Acquiring lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1341.628537] env[63371]: DEBUG oslo_concurrency.lockutils [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] Acquired lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1341.628537] env[63371]: DEBUG nova.network.neutron [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Refreshing network info cache for port 85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1341.818714] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773613, 'name': Rename_Task, 'duration_secs': 0.293967} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1341.819077] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1341.819386] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8d8d2f1a-9260-47fd-8dcb-15add03b5a2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.830187] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1341.830187] env[63371]: value = "task-1773616" [ 1341.830187] env[63371]: _type = "Task" [ 1341.830187] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1341.839953] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773616, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1341.906195] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1341.937771] env[63371]: DEBUG nova.scheduler.client.report [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1341.954308] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f3abce5-c738-4610-85f1-4db9286401c5 req-06247ab5-e279-40de-a50b-e98c8796ee06 service nova] Releasing lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1342.000625] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773614, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.051871] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773615, 'name': PowerOffVM_Task, 'duration_secs': 0.237161} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.052833] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1342.053684] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d12b4a-4e35-4e3d-92f3-90d8253fb7c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.081721] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75407826-178c-41ae-8286-96c25b75a137 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.112310] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1342.112587] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a43079c4-18a4-4053-a26b-0c0f540f5b9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.125810] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1342.125810] env[63371]: value = "task-1773617" [ 1342.125810] env[63371]: _type = "Task" [ 1342.125810] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.139269] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1342.140474] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1342.140474] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1342.140474] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1342.140859] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1342.141093] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87352aa9-d945-4fcd-8e8f-de493ffeced5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.152090] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1342.152418] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1342.153134] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a8ac95b-a33e-4726-ab95-93ef2d20a852 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.159804] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1342.159804] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5231fdcb-47f5-774f-222b-64d6f8d4cb21" [ 1342.159804] env[63371]: _type = "Task" [ 1342.159804] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.171055] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5231fdcb-47f5-774f-222b-64d6f8d4cb21, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.346999] env[63371]: DEBUG oslo_vmware.api [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773616, 'name': PowerOnVM_Task, 'duration_secs': 0.474504} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.346999] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1342.347231] env[63371]: INFO nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Took 7.72 seconds to spawn the instance on the hypervisor. 
[ 1342.348075] env[63371]: DEBUG nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1342.348273] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7b048e-75e8-42ff-87fd-1fc179344ce0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.449221] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1342.450047] env[63371]: DEBUG nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1342.456878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1342.456878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.161s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1342.456878] env[63371]: INFO nova.compute.claims [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1342.502681] env[63371]: DEBUG oslo_vmware.api [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773614, 'name': PowerOnVM_Task, 'duration_secs': 0.723405} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.504649] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1342.505232] env[63371]: INFO nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Took 10.41 seconds to spawn the instance on the hypervisor. [ 1342.505232] env[63371]: DEBUG nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1342.506425] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b2d113-7986-4282-8782-1fe19cc79bf3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.677881] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5231fdcb-47f5-774f-222b-64d6f8d4cb21, 'name': SearchDatastore_Task, 'duration_secs': 0.012641} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1342.678857] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43878fa4-c8a9-4967-a3be-33d8067e7fd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1342.690787] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1342.690787] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521e928a-16e0-2fa3-89c5-fa625d12ffed" [ 1342.690787] env[63371]: _type = "Task" [ 1342.690787] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1342.702463] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521e928a-16e0-2fa3-89c5-fa625d12ffed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1342.837988] env[63371]: DEBUG nova.network.neutron [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updated VIF entry in instance network info cache for port 85189d02-f613-4d29-a47a-b7c1ce74c9f3. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1342.838452] env[63371]: DEBUG nova.network.neutron [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updating instance_info_cache with network_info: [{"id": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "address": "fa:16:3e:52:9a:b1", "network": {"id": "c7291076-10b1-479b-a360-a5d60b016548", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-373506764-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7e27f48936d4019bd23bc30cd94f85b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d2f5e5e2-e460-49ce-aa24-232e4a8007af", "external-id": "nsx-vlan-transportzone-503", "segmentation_id": 503, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85189d02-f6", "ovs_interfaceid": "85189d02-f613-4d29-a47a-b7c1ce74c9f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1342.874873] env[63371]: INFO nova.compute.manager [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Took 20.46 seconds to build instance. [ 1342.961847] env[63371]: DEBUG nova.compute.utils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1342.966260] env[63371]: DEBUG nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Not allocating networking since 'none' was specified. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1343.042904] env[63371]: INFO nova.compute.manager [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Took 20.68 seconds to build instance. 
[ 1343.086770] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1343.087393] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1343.210453] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521e928a-16e0-2fa3-89c5-fa625d12ffed, 'name': SearchDatastore_Task, 'duration_secs': 0.012436} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1343.210878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.212031] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. {{(pid=63371) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1343.212031] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce2c71c2-9a21-4d75-8e37-9ca271dfc64a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1343.223187] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1343.223187] env[63371]: value = "task-1773618" [ 1343.223187] env[63371]: _type = "Task" [ 1343.223187] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1343.235773] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773618, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.342121] env[63371]: DEBUG oslo_concurrency.lockutils [req-51e54d1a-58d6-4110-aa3e-fbb7a6df3919 req-fb67e362-c538-4cec-b0b3-8afe44d4ee9e service nova] Releasing lock "refresh_cache-a43fed87-5205-4148-834e-66778a90b7bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1343.377145] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a3654d08-5ce6-45cb-95da-67f448965f1d tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.341s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.465085] env[63371]: DEBUG nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1343.545116] env[63371]: DEBUG oslo_concurrency.lockutils [None req-758fc2da-f01a-41a4-a070-4099839ed63e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.838s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1343.632269] env[63371]: DEBUG nova.network.neutron [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Successfully updated port: 54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1343.736247] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773618, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1343.881407] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1344.027268] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5c4e90-1822-446f-89b8-225d0bd6d3d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.036641] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d9dd46-97cd-4d81-9bbd-acdc81127c26 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.077663] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1344.081647] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf1170d-ca62-4a45-b446-9f2ac291d98c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.094081] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2fbdf2-c66b-48c4-a442-9d67af5ead2f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.109882] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1344.135814] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "refresh_cache-362d8303-524a-457a-b8d9-2bad87fa816b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1344.135814] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquired lock "refresh_cache-362d8303-524a-457a-b8d9-2bad87fa816b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1344.136086] env[63371]: DEBUG nova.network.neutron [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1344.237563] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 
tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773618, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.841086} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1344.238054] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. [ 1344.240025] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2b5683-3cf8-4a95-92f0-f6d99843b132 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.269867] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1344.270636] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-687cc8a9-61e2-4ac3-8bd6-df71dc23e585 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.295060] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1344.295060] env[63371]: value = "task-1773619" [ 1344.295060] env[63371]: _type = "Task" [ 1344.295060] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.307593] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773619, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.321262] env[63371]: DEBUG nova.compute.manager [req-195ff33d-c248-473b-901e-b216c2fe0a25 req-db4c3b33-6c98-4174-a836-c30b05d9b48d service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Received event network-vif-plugged-54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1344.321449] env[63371]: DEBUG oslo_concurrency.lockutils [req-195ff33d-c248-473b-901e-b216c2fe0a25 req-db4c3b33-6c98-4174-a836-c30b05d9b48d service nova] Acquiring lock "362d8303-524a-457a-b8d9-2bad87fa816b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.321664] env[63371]: DEBUG oslo_concurrency.lockutils [req-195ff33d-c248-473b-901e-b216c2fe0a25 req-db4c3b33-6c98-4174-a836-c30b05d9b48d service nova] Lock "362d8303-524a-457a-b8d9-2bad87fa816b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1344.321818] env[63371]: DEBUG oslo_concurrency.lockutils [req-195ff33d-c248-473b-901e-b216c2fe0a25 req-db4c3b33-6c98-4174-a836-c30b05d9b48d service nova] Lock "362d8303-524a-457a-b8d9-2bad87fa816b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1344.322014] env[63371]: DEBUG nova.compute.manager [req-195ff33d-c248-473b-901e-b216c2fe0a25 req-db4c3b33-6c98-4174-a836-c30b05d9b48d service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] No waiting events found dispatching network-vif-plugged-54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1344.322398] env[63371]: WARNING nova.compute.manager [req-195ff33d-c248-473b-901e-b216c2fe0a25 req-db4c3b33-6c98-4174-a836-c30b05d9b48d service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Received unexpected event network-vif-plugged-54ac14c5-812a-455e-88ff-92040c426688 for instance with vm_state building and task_state spawning. [ 1344.413417] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.484230] env[63371]: DEBUG nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1344.512150] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1344.513576] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1344.513576] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1344.513576] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1344.513576] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1344.513576] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1344.513803] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1344.513953] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1344.514143] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac 
tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1344.515897] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1344.516119] env[63371]: DEBUG nova.virt.hardware [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1344.517010] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87040b98-d705-49dd-8e4c-ac84362bc23f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.528131] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c284b4d-16b0-4edd-9881-9d3f45713a1a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.547325] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1344.554851] env[63371]: DEBUG oslo.service.loopingcall [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1344.555068] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1344.555306] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ffae8e1-b2e5-411b-9370-5b22626a18cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.583673] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1344.583673] env[63371]: value = "task-1773620" [ 1344.583673] env[63371]: _type = "Task" [ 1344.583673] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.601880] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773620, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1344.624626] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1344.635068] env[63371]: ERROR nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [req-482c8083-3b4f-444c-b529-5c1eae5159a4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-482c8083-3b4f-444c-b529-5c1eae5159a4"}]} [ 1344.654455] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1344.675934] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1344.675934] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1344.690106] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 
tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: 83f2afd9-956a-4c8c-9f08-b65141062b17 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1344.711783] env[63371]: DEBUG nova.network.neutron [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1344.720107] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1344.805623] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773619, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.085281] env[63371]: DEBUG nova.network.neutron [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Updating instance_info_cache with network_info: [{"id": "54ac14c5-812a-455e-88ff-92040c426688", "address": "fa:16:3e:e5:0b:b8", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54ac14c5-81", "ovs_interfaceid": "54ac14c5-812a-455e-88ff-92040c426688", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1345.108145] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773620, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.282170] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c201718d-39db-4be3-a461-9501b3d30417 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.292136] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcbc751-d3e1-4630-9ef1-a87c58a3e17c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.336571] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcdf91a6-16ff-4849-aa45-6537734a71e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.339682] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773619, 'name': ReconfigVM_Task, 'duration_secs': 0.644077} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.340432] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1345.341749] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4d5d63-3b08-42b7-b9f9-7d1de96fb588 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.348429] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cf9c5d-e976-4496-bbbc-c575e6c72e1a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.375417] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0f45737-140e-41fd-bd24-29bc076e9547 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.394754] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1345.404534] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 
tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1345.404534] env[63371]: value = "task-1773621" [ 1345.404534] env[63371]: _type = "Task" [ 1345.404534] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.416012] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773621, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.598280] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Releasing lock "refresh_cache-362d8303-524a-457a-b8d9-2bad87fa816b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1345.598617] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Instance network_info: |[{"id": "54ac14c5-812a-455e-88ff-92040c426688", "address": "fa:16:3e:e5:0b:b8", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54ac14c5-81", "ovs_interfaceid": "54ac14c5-812a-455e-88ff-92040c426688", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1345.598869] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773620, 'name': CreateVM_Task, 'duration_secs': 0.769784} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.599283] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:0b:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '54ac14c5-812a-455e-88ff-92040c426688', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1345.609147] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Creating folder: Project (014cf08938b14b169e45f01c87f33d23). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1345.609147] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1345.609147] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ad982a5-5c92-4245-8474-7ce616018bab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.610339] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1345.610518] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.611049] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1345.611362] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ed03e96-89e7-476f-a072-68941a434385 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.618355] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1345.618355] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c48c71-c435-7061-f40a-35760197f6d7" [ 1345.618355] env[63371]: _type = "Task" [ 1345.618355] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.630137] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c48c71-c435-7061-f40a-35760197f6d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.631704] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Created folder: Project (014cf08938b14b169e45f01c87f33d23) in parent group-v368199. [ 1345.631704] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Creating folder: Instances. Parent ref: group-v368248. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1345.631931] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80217310-133e-4f71-b22f-7ab505bbe255 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.645031] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Created folder: Instances in parent group-v368248. [ 1345.645303] env[63371]: DEBUG oslo.service.loopingcall [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1345.645568] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1345.645802] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-faf52193-2230-44a3-8e93-3b84ec204e8c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.668369] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1345.668369] env[63371]: value = "task-1773624" [ 1345.668369] env[63371]: _type = "Task" [ 1345.668369] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.677154] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773624, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.766724] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1345.767662] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1345.926018] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773621, 'name': ReconfigVM_Task, 'duration_secs': 0.371949} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.926018] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1345.926290] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f8aab75-594c-4215-9be7-4b4a9b146e9e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.929545] env[63371]: ERROR nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [req-6763409b-6693-4b87-886c-59ab3091fe3b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6763409b-6693-4b87-886c-59ab3091fe3b"}]} [ 1345.937129] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1345.937129] env[63371]: value = "task-1773625" [ 1345.937129] env[63371]: _type = "Task" [ 1345.937129] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.953492] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773625, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.958641] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1345.980043] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1345.980043] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1345.994958] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: 83f2afd9-956a-4c8c-9f08-b65141062b17 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1346.016738] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1346.132160] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': 
session[52854284-8312-6a88-0b15-8c5a2a120aab]52c48c71-c435-7061-f40a-35760197f6d7, 'name': SearchDatastore_Task, 'duration_secs': 0.014481} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.132537] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.133418] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1346.133418] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.133418] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.133418] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1346.133739] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a5d3997-b18c-44c0-a8a0-a26c214e08e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.147911] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1346.147911] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1346.149259] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ac9204b-b0ad-48bf-8bf1-e57e7e9f3bd6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.164607] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1346.164607] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]527185ee-6de6-7799-afae-a795538ad8c5" [ 1346.164607] env[63371]: _type = "Task" [ 1346.164607] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.184372] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "a43fed87-5205-4148-834e-66778a90b7bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.184372] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "a43fed87-5205-4148-834e-66778a90b7bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.187023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "a43fed87-5205-4148-834e-66778a90b7bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.187023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "a43fed87-5205-4148-834e-66778a90b7bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.187023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "a43fed87-5205-4148-834e-66778a90b7bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.187822] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': 
session[52854284-8312-6a88-0b15-8c5a2a120aab]527185ee-6de6-7799-afae-a795538ad8c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.187822] env[63371]: INFO nova.compute.manager [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Terminating instance [ 1346.198396] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773624, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.199333] env[63371]: DEBUG nova.compute.manager [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1346.199538] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1346.200789] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f46566f-cbbe-41b0-8046-f5ef2274b230 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.209904] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1346.209904] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22dd47cd-1abf-4ca0-8db1-fa9b20d6c52c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.220732] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1346.220732] env[63371]: value = "task-1773626" [ 1346.220732] env[63371]: _type = "Task" [ 1346.220732] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.232334] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773626, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.454096] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773625, 'name': PowerOnVM_Task} progress is 92%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.596863] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "9249f27a-1985-4be1-947c-e433c7aa26f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.597116] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.618589] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125ee5ea-e652-4721-a119-d5224abaed3e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.627634] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa87b077-684e-48c7-83d7-0e0f1735b3df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.674800] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70fe03dd-be13-4efe-b833-08b033e69d41 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.692860] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773624, 'name': CreateVM_Task, 'duration_secs': 0.565206} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.693129] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527185ee-6de6-7799-afae-a795538ad8c5, 'name': SearchDatastore_Task, 'duration_secs': 0.024381} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.694384] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eff3bc3-44e6-4e13-a484-cb5848b32418 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.698596] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1346.700258] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.700258] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.700396] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1346.700521] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04f2e81a-6702-4f37-b3d0-4f74dd6ad6a3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.702952] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40c06f67-a3ca-4496-9f85-7a4e77386ed1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.714408] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1346.720246] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1346.720246] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521bf9f0-40ec-69e2-8ac7-b36c66966199" [ 1346.720246] env[63371]: _type = "Task" [ 1346.720246] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.720246] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1346.720246] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f6cdb7-1902-c742-702e-bec3a9cb922a" [ 1346.720246] env[63371]: _type = "Task" [ 1346.720246] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.738446] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521bf9f0-40ec-69e2-8ac7-b36c66966199, 'name': SearchDatastore_Task, 'duration_secs': 0.018443} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.744071] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.744071] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1346.744071] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f6cdb7-1902-c742-702e-bec3a9cb922a, 'name': SearchDatastore_Task, 'duration_secs': 0.017103} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.744071] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773626, 'name': PowerOffVM_Task, 'duration_secs': 0.304496} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.744245] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-22d1d89e-036d-41ef-9936-81bfa90bcae4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.745902] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1346.746049] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1346.746279] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1346.746429] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1346.746597] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1346.746851] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1346.747011] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1346.747256] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69cab130-c0ee-4945-af7a-d56ad736ef1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.749727] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with 
opID=oslo.vmware-42203731-4aaf-4e3e-8b58-71206e4c5d9d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.758825] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1346.758825] env[63371]: value = "task-1773628" [ 1346.758825] env[63371]: _type = "Task" [ 1346.758825] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.763686] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.765260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.765260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1346.765260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1346.765260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1346.766201] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1346.766899] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 
tempest-TenantUsagesTestJSON-121194719-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1346.768299] env[63371]: INFO nova.compute.manager [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Terminating instance [ 1346.770071] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-302114d9-52fa-453e-a25b-bc124b9f8b00 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.774019] env[63371]: DEBUG nova.compute.manager [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1346.774019] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1346.774150] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1834274d-b1a0-4159-9867-991ab608bb88 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.780376] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773628, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.783609] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1346.783609] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5224aac5-6d78-ca94-8c62-c6234d78ee35" [ 1346.783609] env[63371]: _type = "Task" [ 1346.783609] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.791112] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1346.791112] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-958fe09d-aba6-4cf4-8fc9-aef61800730d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.795861] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5224aac5-6d78-ca94-8c62-c6234d78ee35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.798429] env[63371]: DEBUG oslo_vmware.api [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1346.798429] env[63371]: value = "task-1773629" [ 1346.798429] env[63371]: _type = "Task" [ 1346.798429] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.806747] env[63371]: DEBUG oslo_vmware.api [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773629, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.847039] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1346.847505] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1346.847629] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Deleting the datastore file [datastore1] a43fed87-5205-4148-834e-66778a90b7bc {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1346.847823] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b390be3-eed3-47ad-b059-f539221f673d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.858123] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for the task: (returnval){ [ 1346.858123] env[63371]: value = "task-1773630" [ 1346.858123] env[63371]: _type = "Task" [ 1346.858123] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.869591] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773630, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.952874] env[63371]: DEBUG oslo_vmware.api [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773625, 'name': PowerOnVM_Task, 'duration_secs': 0.612868} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.953180] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1346.957129] env[63371]: DEBUG nova.compute.manager [None req-f7efd390-d069-4a1b-9fcb-1292cf3a6ad7 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1346.958064] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4688bd7-74ff-4074-9715-d9523a98b39b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.241581] env[63371]: ERROR nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [req-4e4fa56d-d030-465f-9b40-6569e601d3ff] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4e4fa56d-d030-465f-9b40-6569e601d3ff"}]} [ 1347.259594] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1347.272853] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773628, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.277310] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1347.277310] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1347.295923] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5224aac5-6d78-ca94-8c62-c6234d78ee35, 'name': SearchDatastore_Task, 'duration_secs': 0.014679} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.297381] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1347.301648] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31238640-2566-418e-86c2-045a9d67fa1f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.316058] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1347.316058] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526e1f0c-488d-35d2-e75a-0035bf4f6ffa" [ 1347.316058] env[63371]: _type = "Task" [ 1347.316058] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.317910] env[63371]: DEBUG oslo_vmware.api [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773629, 'name': PowerOffVM_Task, 'duration_secs': 0.256929} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.322059] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1347.322460] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1347.323712] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1347.325987] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52e0d4a0-22e2-4e07-9cc7-3f060b463b33 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.338044] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526e1f0c-488d-35d2-e75a-0035bf4f6ffa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.373526] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773630, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.419920] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1347.419920] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1347.419920] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Deleting the datastore file [datastore1] 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1347.419920] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-62a98648-4d0c-4fb9-be41-a4d7e48f9948 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.427023] env[63371]: DEBUG oslo_vmware.api [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for the task: (returnval){ [ 1347.427023] env[63371]: value = "task-1773632" [ 1347.427023] env[63371]: _type = "Task" [ 1347.427023] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.435176] env[63371]: DEBUG oslo_vmware.api [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773632, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.474878] env[63371]: DEBUG nova.compute.manager [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Received event network-changed-54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1347.475099] env[63371]: DEBUG nova.compute.manager [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Refreshing instance network info cache due to event network-changed-54ac14c5-812a-455e-88ff-92040c426688. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1347.475342] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Acquiring lock "refresh_cache-362d8303-524a-457a-b8d9-2bad87fa816b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1347.475485] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Acquired lock "refresh_cache-362d8303-524a-457a-b8d9-2bad87fa816b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1347.475653] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Refreshing network info cache for port 54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1347.781710] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773628, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644866} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.781808] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1347.782027] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1347.783026] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b2feb2a2-f229-4893-ab97-7d3404fbb7ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.794190] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1347.794190] env[63371]: value = "task-1773633" [ 1347.794190] env[63371]: _type = "Task" [ 1347.794190] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.810481] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773633, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.835753] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526e1f0c-488d-35d2-e75a-0035bf4f6ffa, 'name': SearchDatastore_Task, 'duration_secs': 0.052828} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.835860] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1347.836300] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 362d8303-524a-457a-b8d9-2bad87fa816b/362d8303-524a-457a-b8d9-2bad87fa816b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1347.837039] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f2b626ed-30ed-477c-95c6-f1a5285cf1ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.848494] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1347.848494] env[63371]: value = "task-1773634" [ 1347.848494] env[63371]: _type = "Task" [ 1347.848494] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.862093] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773634, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.875313] env[63371]: DEBUG oslo_vmware.api [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Task: {'id': task-1773630, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.527753} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.875666] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1347.875873] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1347.876200] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1347.876290] env[63371]: INFO nova.compute.manager [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1347.876835] env[63371]: DEBUG oslo.service.loopingcall [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1347.876835] env[63371]: DEBUG nova.compute.manager [-] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1347.876835] env[63371]: DEBUG nova.network.neutron [-] [instance: a43fed87-5205-4148-834e-66778a90b7bc] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1347.901478] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2da0f80-0d9a-4076-a1f3-a15f3bb5fe7d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.910868] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db7fa171-d710-486e-bb74-377455df097a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.951204] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c115d7fc-204b-48f1-b246-6e29ca5ad009 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.963490] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0da369e-4410-4777-9f72-23759413bd6e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.967548] env[63371]: DEBUG oslo_vmware.api [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Task: {'id': task-1773632, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.374584} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.967820] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1347.967998] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1347.968193] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1347.968393] env[63371]: INFO nova.compute.manager [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Took 1.20 seconds to destroy the instance on the hypervisor. 
[ 1347.968650] env[63371]: DEBUG oslo.service.loopingcall [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1347.969271] env[63371]: DEBUG nova.compute.manager [-] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1347.969619] env[63371]: DEBUG nova.network.neutron [-] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1347.982575] env[63371]: DEBUG nova.compute.provider_tree [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1348.305590] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773633, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088205} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.306060] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1348.309043] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131c165d-2d6e-4a4f-b39d-d18433fe1923 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.337281] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1348.337281] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02fa12b4-ab10-48ab-8376-cb6879f69e9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.366480] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773634, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.367940] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1348.367940] env[63371]: value = "task-1773635" [ 1348.367940] env[63371]: _type = "Task" [ 1348.367940] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.378738] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773635, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.487268] env[63371]: DEBUG nova.scheduler.client.report [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1348.820712] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Updated VIF entry in instance network info cache for port 54ac14c5-812a-455e-88ff-92040c426688. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1348.821214] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Updating instance_info_cache with network_info: [{"id": "54ac14c5-812a-455e-88ff-92040c426688", "address": "fa:16:3e:e5:0b:b8", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.119", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap54ac14c5-81", "ovs_interfaceid": "54ac14c5-812a-455e-88ff-92040c426688", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.866981] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773634, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.879685] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773635, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.994166] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 6.537s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1348.994166] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1348.996412] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.697s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1348.997052] env[63371]: DEBUG nova.objects.instance [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lazy-loading 'resources' on Instance uuid ca202079-2eae-441e-80f6-e403497e137d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1349.255064] env[63371]: DEBUG nova.network.neutron [-] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.324978] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Releasing lock "refresh_cache-362d8303-524a-457a-b8d9-2bad87fa816b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1349.325285] env[63371]: DEBUG nova.compute.manager [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Received event network-changed-f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1349.325489] env[63371]: DEBUG nova.compute.manager [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Refreshing instance network info cache due to event network-changed-f65a228f-d220-4478-a274-65cee7a3df3c. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1349.325803] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Acquiring lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1349.325957] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Acquired lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1349.326136] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Refreshing network info cache for port f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1349.343632] env[63371]: DEBUG nova.network.neutron [-] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1349.369403] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773634, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.024637} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.373636] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 362d8303-524a-457a-b8d9-2bad87fa816b/362d8303-524a-457a-b8d9-2bad87fa816b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1349.373976] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1349.374300] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c195894-33a1-4d14-8798-163727a8e551 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.383173] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773635, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.384556] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1349.384556] env[63371]: value = "task-1773636" [ 1349.384556] env[63371]: _type = "Task" [ 1349.384556] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.395958] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773636, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.497896] env[63371]: DEBUG nova.compute.utils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1349.499167] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1349.499376] env[63371]: DEBUG nova.network.neutron [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1349.648236] env[63371]: DEBUG nova.policy [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e49a0ae65b9b4f878930641771fec10f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4994abb8da3b4a018414c60719a056b9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1349.725965] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "b48a8e83-e581-4886-833b-bbce155d40d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1349.726393] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "b48a8e83-e581-4886-833b-bbce155d40d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1349.757741] env[63371]: INFO nova.compute.manager [-] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Took 1.88 seconds to deallocate network for instance. [ 1349.848170] env[63371]: INFO nova.compute.manager [-] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Took 1.88 seconds to deallocate network for instance. [ 1349.885904] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773635, 'name': ReconfigVM_Task, 'duration_secs': 1.036358} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.889411] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1349.890410] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6f324953-a1cc-465f-8054-d5ea8246b2f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.904124] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773636, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077704} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.904449] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1349.904449] env[63371]: value = "task-1773637" [ 1349.904449] env[63371]: _type = "Task" [ 1349.904449] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.904667] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1349.905870] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d994874-ccc5-4a51-905e-383479ea78ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.949120] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 362d8303-524a-457a-b8d9-2bad87fa816b/362d8303-524a-457a-b8d9-2bad87fa816b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1349.949313] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773637, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.952376] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12345c9b-0a13-4011-a0c2-55649b1d8ae3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.977915] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1349.977915] env[63371]: value = "task-1773638" [ 1349.977915] env[63371]: _type = "Task" [ 1349.977915] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1349.991073] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773638, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.004978] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1350.086244] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfebb3a-5643-4ef9-a82b-8a843a7d66f5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.097501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2486b7b-2c6c-4e9b-8dad-cc71eb3c56d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.137401] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0124417-1b60-413e-abf5-a5634a284012 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.146624] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef3dd92-5d7c-43bf-b438-21ed125465bc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.161978] env[63371]: DEBUG nova.compute.provider_tree [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1350.269441] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.330305] env[63371]: DEBUG nova.network.neutron [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Successfully created port: 4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1350.354127] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updated VIF entry in instance network info cache for port f65a228f-d220-4478-a274-65cee7a3df3c. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1350.354503] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updating instance_info_cache with network_info: [{"id": "f65a228f-d220-4478-a274-65cee7a3df3c", "address": "fa:16:3e:60:b5:56", "network": {"id": "c7fb5c0c-158e-4552-8360-a944e8eff32d", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-951711523-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1f0a4db7d709461ca32a5dc0ebabdf31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69744f59-ecac-4b0b-831e-82a274d7acbb", "external-id": "nsx-vlan-transportzone-770", "segmentation_id": 770, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf65a228f-d2", "ovs_interfaceid": "f65a228f-d220-4478-a274-65cee7a3df3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.357117] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1350.420785] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773637, 'name': Rename_Task, 'duration_secs': 0.238471} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.421040] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1350.424998] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7141b60c-eec4-4092-b0a0-2c10332d9121 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.433691] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1350.433691] env[63371]: value = "task-1773639" [ 1350.433691] env[63371]: _type = "Task" [ 1350.433691] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1350.447922] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773639, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.489620] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773638, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1350.647582] env[63371]: DEBUG nova.compute.manager [req-87c0a5d6-a3e2-4e55-b652-3e51a1da9de3 req-c1f23d29-4504-4919-a0fa-97f66ba0613c service nova] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Received event network-vif-deleted-85189d02-f613-4d29-a47a-b7c1ce74c9f3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1350.648174] env[63371]: DEBUG nova.compute.manager [req-87c0a5d6-a3e2-4e55-b652-3e51a1da9de3 req-c1f23d29-4504-4919-a0fa-97f66ba0613c service nova] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Received event network-vif-deleted-358a8d7d-459f-49a9-b3c7-0cf811dd7e54 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1350.667131] env[63371]: DEBUG nova.scheduler.client.report [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1350.859275] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Releasing lock "refresh_cache-cbcdfe1a-86a4-4a12-99b5-44d291d41769" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1350.859653] env[63371]: DEBUG nova.compute.manager [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Received event network-changed-1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1350.859831] env[63371]: DEBUG nova.compute.manager [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Refreshing instance network info cache due to event network-changed-1846a8cd-46dc-4187-af60-d4e4eee750dc. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1350.860079] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Acquiring lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1350.860243] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Acquired lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1350.860409] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Refreshing network info cache for port 1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1350.947778] env[63371]: DEBUG oslo_vmware.api [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773639, 'name': PowerOnVM_Task, 'duration_secs': 0.479019} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.948309] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1350.948633] env[63371]: INFO nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Took 6.46 seconds to spawn the instance on the hypervisor. [ 1350.948752] env[63371]: DEBUG nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1350.949867] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cebdc21c-c61b-4612-a398-e671345d047f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.991444] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773638, 'name': ReconfigVM_Task, 'duration_secs': 0.615184} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1350.992085] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 362d8303-524a-457a-b8d9-2bad87fa816b/362d8303-524a-457a-b8d9-2bad87fa816b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1350.992667] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dad0185e-5064-4a05-9230-8435a6764db7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.002207] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1351.002207] env[63371]: value = "task-1773640" [ 1351.002207] env[63371]: _type = "Task" [ 1351.002207] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.013742] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773640, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.017529] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1351.053368] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1351.053368] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1351.053368] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1351.053535] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1351.054206] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1351.054556] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1351.055041] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1351.057156] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1351.057156] env[63371]: DEBUG 
nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1351.057156] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1351.057156] env[63371]: DEBUG nova.virt.hardware [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1351.057156] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cc45a0-34dc-48c3-b775-9a40f81c19de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.069295] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ede1186-7fae-4ff4-8324-c05d3ff3cfea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.174018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.175s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.175251] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.300s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1351.178390] env[63371]: INFO nova.compute.claims [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1351.202240] env[63371]: INFO nova.scheduler.client.report [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Deleted allocations for instance ca202079-2eae-441e-80f6-e403497e137d [ 1351.475088] env[63371]: INFO nova.compute.manager [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Took 22.91 seconds to build instance. 
[ 1351.513938] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773640, 'name': Rename_Task, 'duration_secs': 0.385717} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1351.514516] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1351.514921] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a0041bb7-cb4d-4964-983b-116669397547 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.523785] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1351.523785] env[63371]: value = "task-1773641" [ 1351.523785] env[63371]: _type = "Task" [ 1351.523785] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.534909] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773641, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.718248] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bd96483e-58b1-4f84-bee2-1fb0d6615ad0 tempest-ServerDiagnosticsNegativeTest-1989539863 tempest-ServerDiagnosticsNegativeTest-1989539863-project-member] Lock "ca202079-2eae-441e-80f6-e403497e137d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.514s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1351.926776] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Updated VIF entry in instance network info cache for port 1846a8cd-46dc-4187-af60-d4e4eee750dc. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1351.927230] env[63371]: DEBUG nova.network.neutron [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Updating instance_info_cache with network_info: [{"id": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "address": "fa:16:3e:81:c5:8b", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1846a8cd-46", "ovs_interfaceid": "1846a8cd-46dc-4187-af60-d4e4eee750dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1351.976863] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8ca80cd-3222-4618-af6e-a6abc5f23eac tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "1924d3d2-cc88-4fd2-b509-8463da796658" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.998s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1352.036272] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773641, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.432151] env[63371]: DEBUG oslo_concurrency.lockutils [req-2adf8519-1f16-4834-aaa6-809f752330ba req-8d9ee02d-0d94-454d-af6d-e8102d3baa3f service nova] Releasing lock "refresh_cache-cfbd0c7c-243e-497a-acb1-ab9323c23574" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1352.479833] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1352.498377] env[63371]: DEBUG nova.network.neutron [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Successfully updated port: 4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1352.540887] env[63371]: DEBUG oslo_vmware.api [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773641, 'name': PowerOnVM_Task, 'duration_secs': 0.709241} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.541947] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1352.541947] env[63371]: INFO nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Took 12.94 seconds to spawn the instance on the hypervisor. [ 1352.541947] env[63371]: DEBUG nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1352.542993] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21d4a345-14da-4cf9-895a-e1f71bbfa864 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.712317] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6494053-9c68-4e99-bc00-9a0567b88c6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.723759] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7935de-4ae5-4065-8330-07b24aca0149 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.762035] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96be5cb1-9bdc-44dd-9617-09036974fb19 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.771491] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8ff5c1-fe4f-462c-9fff-07ac2325eb32 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.789417] env[63371]: DEBUG nova.compute.provider_tree [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 
48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1353.004684] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.004861] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquired lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1353.005019] env[63371]: DEBUG nova.network.neutron [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1353.023069] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.064709] env[63371]: INFO nova.compute.manager [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Took 26.95 seconds to build instance. [ 1353.323837] env[63371]: ERROR nova.scheduler.client.report [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [req-c5f7441b-5b87-4589-80c8-ec740a645b8f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c5f7441b-5b87-4589-80c8-ec740a645b8f"}]} [ 1353.343061] env[63371]: DEBUG nova.scheduler.client.report [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1353.357836] env[63371]: DEBUG nova.scheduler.client.report [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1353.358124] env[63371]: DEBUG nova.compute.provider_tree [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1353.376822] env[63371]: DEBUG nova.scheduler.client.report [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1353.402734] env[63371]: DEBUG nova.scheduler.client.report [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1353.546829] env[63371]: DEBUG nova.network.neutron [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1353.566683] env[63371]: DEBUG oslo_concurrency.lockutils [None req-13376b1e-011d-4da1-b7a6-8d850ea6b69e tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "362d8303-524a-457a-b8d9-2bad87fa816b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.505s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.581687] env[63371]: INFO nova.compute.manager [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Rebuilding instance [ 1353.657023] env[63371]: DEBUG nova.compute.manager [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1353.657023] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad62be9-a4f7-4291-b466-e4061e4301c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.791241] env[63371]: DEBUG nova.compute.manager [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Received event network-vif-plugged-4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1353.791459] env[63371]: DEBUG oslo_concurrency.lockutils [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] Acquiring lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1353.791795] env[63371]: DEBUG oslo_concurrency.lockutils [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1353.791949] env[63371]: DEBUG oslo_concurrency.lockutils [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1353.792054] env[63371]: DEBUG nova.compute.manager [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] No waiting events found dispatching network-vif-plugged-4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1353.792608] env[63371]: WARNING nova.compute.manager [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 
7841ebd2-0c23-4e32-8b81-42311a32c6fd] Received unexpected event network-vif-plugged-4493eb7b-33d3-4a78-a1dd-3a96c6144850 for instance with vm_state building and task_state spawning. [ 1353.792905] env[63371]: DEBUG nova.compute.manager [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Received event network-changed-4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1353.793157] env[63371]: DEBUG nova.compute.manager [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Refreshing instance network info cache due to event network-changed-4493eb7b-33d3-4a78-a1dd-3a96c6144850. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1353.793396] env[63371]: DEBUG oslo_concurrency.lockutils [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] Acquiring lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1353.813456] env[63371]: DEBUG nova.network.neutron [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Updating instance_info_cache with network_info: [{"id": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "address": "fa:16:3e:19:9f:ce", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4493eb7b-33", "ovs_interfaceid": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1353.977805] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15de9f00-16d0-40cb-8aa7-0e10ba8c9e8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.988344] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797a270c-9075-44c1-bbda-cc04b145a625 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.027342] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e1369a-5058-423c-9d64-994d1ede71a2 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.036382] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059cc1a4-bdb6-447f-8e45-ba1a73af0021 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.053203] env[63371]: DEBUG nova.compute.provider_tree [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1354.075351] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1354.174953] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1354.174953] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79de4b93-d9d8-4674-ad56-2b3a6b6e9490 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.182827] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1354.182827] env[63371]: value = "task-1773642" [ 1354.182827] env[63371]: _type = "Task" [ 1354.182827] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.194442] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773642, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.316674] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Releasing lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1354.320021] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Instance network_info: |[{"id": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "address": "fa:16:3e:19:9f:ce", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4493eb7b-33", "ovs_interfaceid": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1354.320021] env[63371]: DEBUG oslo_concurrency.lockutils [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] Acquired lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.321265] env[63371]: DEBUG nova.network.neutron [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Refreshing network info cache for port 4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1354.321265] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:9f:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4493eb7b-33d3-4a78-a1dd-3a96c6144850', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1354.328139] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Creating 
folder: Project (4994abb8da3b4a018414c60719a056b9). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1354.329678] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9f422dd-964f-499e-82d4-dd02a6f503b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.344020] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Created folder: Project (4994abb8da3b4a018414c60719a056b9) in parent group-v368199. [ 1354.344020] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Creating folder: Instances. Parent ref: group-v368251. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1354.344020] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba47e2a2-b29c-47b9-bc00-84cb4515d224 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.353440] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Created folder: Instances in parent group-v368251. [ 1354.353957] env[63371]: DEBUG oslo.service.loopingcall [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1354.354839] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1354.355332] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e778e7ad-03f7-42b6-9ae7-c7300e10905f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.380910] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1354.380910] env[63371]: value = "task-1773645" [ 1354.380910] env[63371]: _type = "Task" [ 1354.380910] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.390014] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773645, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.430750] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1354.599798] env[63371]: DEBUG nova.scheduler.client.report [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 42 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1354.600259] env[63371]: DEBUG nova.compute.provider_tree [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 42 to 43 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1354.600496] env[63371]: DEBUG nova.compute.provider_tree [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1354.605729] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1354.695016] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773642, 'name': PowerOffVM_Task, 'duration_secs': 0.125551} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.695324] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1354.695556] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1354.696415] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1f8688-2b7d-4bf9-bfeb-50f7b69cc921 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.707157] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1354.707157] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4a3ce34b-760c-4fcb-82fa-612d00261d05 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.739481] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1354.740853] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1354.740853] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleting the datastore file [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1354.740853] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34a57d2b-57dd-4db1-a974-541c556cdd71 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.753924] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1354.753924] env[63371]: value = "task-1773647" [ 1354.753924] env[63371]: _type = "Task" [ 1354.753924] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.767446] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773647, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.893738] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773645, 'name': CreateVM_Task, 'duration_secs': 0.459036} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.893927] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1354.894740] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1354.894928] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.895284] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1354.895549] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cce4025-eb8d-4bc9-a901-6ac474a9feed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.901482] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1354.901482] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52207b9f-d0ef-eba1-6d01-d8b1d501c0a2" [ 1354.901482] env[63371]: _type = "Task" [ 1354.901482] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.913507] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52207b9f-d0ef-eba1-6d01-d8b1d501c0a2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.934759] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1355.105746] env[63371]: DEBUG nova.network.neutron [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Updated VIF entry in instance network info cache for port 4493eb7b-33d3-4a78-a1dd-3a96c6144850. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1355.105988] env[63371]: DEBUG nova.network.neutron [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Updating instance_info_cache with network_info: [{"id": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "address": "fa:16:3e:19:9f:ce", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4493eb7b-33", "ovs_interfaceid": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1355.108567] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.933s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1355.108776] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1355.112528] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.876s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1355.112995] env[63371]: DEBUG nova.objects.instance [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lazy-loading 'resources' on Instance uuid fc0715a1-a056-4a1b-a86e-959680effc97 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1355.269025] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773647, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160511} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.269025] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1355.269025] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1355.269836] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1355.417026] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52207b9f-d0ef-eba1-6d01-d8b1d501c0a2, 'name': SearchDatastore_Task, 'duration_secs': 0.0204} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.417026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.417026] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1355.417026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1355.417410] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1355.417410] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1355.417410] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a933038-a48a-4c9d-96aa-9102ee9ffc99 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.429164] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1355.429164] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1355.429164] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db4c4d1a-26d4-44bd-b49d-33906cbf7e61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.437688] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1355.437688] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525df11a-d8bb-efb7-4b0e-06a686648a17" [ 1355.437688] env[63371]: _type = "Task" [ 1355.437688] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.450816] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525df11a-d8bb-efb7-4b0e-06a686648a17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.613450] env[63371]: DEBUG oslo_concurrency.lockutils [req-3d04fd96-172e-423d-a0c2-d59242706f90 req-ca240125-b469-4806-b587-36066fda5111 service nova] Releasing lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1355.616214] env[63371]: DEBUG nova.compute.utils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1355.621348] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1355.621786] env[63371]: DEBUG nova.network.neutron [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1355.708948] env[63371]: DEBUG nova.policy [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38c65e6dd9e4468fb1a0235bac086151', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4ca8a73414142d497ebd3d3f043d9ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1355.951599] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525df11a-d8bb-efb7-4b0e-06a686648a17, 'name': SearchDatastore_Task, 'duration_secs': 0.011819} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.952817] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-270195b3-1fa1-4132-a404-280cae5aa732 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.963602] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1355.963602] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52407d31-7823-09e7-e357-efddee7add01" [ 1355.963602] env[63371]: _type = "Task" [ 1355.963602] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.973852] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52407d31-7823-09e7-e357-efddee7add01, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.122982] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1356.150780] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848ef236-a2f5-4bff-a0ac-857ea65edccd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.160350] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175c5595-ab1c-475a-ab42-eec57c240a20 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.194711] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c3ffeb-712e-416c-a0a3-c062830c2a3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.203486] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a189c44f-8a49-4a7f-98aa-6d937505b665 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.219446] env[63371]: DEBUG nova.compute.provider_tree [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1356.319264] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1356.319492] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1356.319696] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1356.319794] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1356.319972] env[63371]: DEBUG 
nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1356.324289] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1356.324460] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1356.324627] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1356.324792] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1356.324952] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1356.325137] env[63371]: DEBUG nova.virt.hardware [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1356.325981] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a92366-5102-49f9-acea-158d0d5fc31d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.336938] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11d9d422-656e-4ea3-9a36-1e3c579d1bee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.354428] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1356.360645] env[63371]: DEBUG oslo.service.loopingcall [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1356.362987] env[63371]: DEBUG nova.network.neutron [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Successfully created port: e2249de3-2c03-4371-aab4-6173dd2b5d56 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1356.365769] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1356.365769] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c254aa4-50b6-4d5e-b3bd-b39c623c8015 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.393049] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1356.393049] env[63371]: value = "task-1773648" [ 1356.393049] env[63371]: _type = "Task" [ 1356.393049] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.402519] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773648, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.476465] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52407d31-7823-09e7-e357-efddee7add01, 'name': SearchDatastore_Task, 'duration_secs': 0.010069} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.476753] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1356.479804] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7841ebd2-0c23-4e32-8b81-42311a32c6fd/7841ebd2-0c23-4e32-8b81-42311a32c6fd.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1356.479804] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c2cae59-23a8-40a7-afdd-d104ac00e262 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.490146] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1356.490146] env[63371]: value = "task-1773649" [ 1356.490146] env[63371]: _type = "Task" [ 1356.490146] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.501041] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773649, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.723569] env[63371]: DEBUG nova.scheduler.client.report [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1356.908040] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773648, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.007040] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773649, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.138341] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1357.176391] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1357.176391] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1357.176391] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1357.176610] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1357.176646] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1357.176793] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1357.177015] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 
1357.177515] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1357.177746] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1357.177927] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1357.178470] env[63371]: DEBUG nova.virt.hardware [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1357.182185] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b756886-fd7e-4e63-810a-d5028fcf00c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.192735] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b062e01-616b-4579-b883-25ccdd5358dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.222427] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquiring lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1357.222837] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.229886] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.118s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.232499] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 20.746s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1357.253518] env[63371]: INFO nova.scheduler.client.report [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted allocations for instance fc0715a1-a056-4a1b-a86e-959680effc97 [ 1357.407958] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773648, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.505165] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773649, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519214} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.505448] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7841ebd2-0c23-4e32-8b81-42311a32c6fd/7841ebd2-0c23-4e32-8b81-42311a32c6fd.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1357.505668] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1357.505933] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01b787a3-f87e-44de-a197-57664d5f7695 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.514179] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1357.514179] env[63371]: value = "task-1773650" [ 1357.514179] env[63371]: _type = "Task" [ 1357.514179] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.524208] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773650, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.740990] env[63371]: INFO nova.compute.claims [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1357.763618] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d6fecbce-9566-44d9-81c1-d5e535ece252 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "fc0715a1-a056-4a1b-a86e-959680effc97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.992s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1357.911406] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773648, 'name': CreateVM_Task, 'duration_secs': 1.382616} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.911844] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1357.912119] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1357.912238] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1357.912561] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1357.912808] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3c45653-dc3a-4380-ac5d-f9350a83002f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.918754] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1357.918754] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526a93c4-2741-b52a-550b-3a9dff5e50ed" [ 1357.918754] env[63371]: _type = "Task" [ 1357.918754] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.930569] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526a93c4-2741-b52a-550b-3a9dff5e50ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.024576] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773650, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081137} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.028888] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1358.028888] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-714271b2-42eb-49e3-ae87-09c1bf3fdcbd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.052676] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 7841ebd2-0c23-4e32-8b81-42311a32c6fd/7841ebd2-0c23-4e32-8b81-42311a32c6fd.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1358.053335] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1208dd2-276f-4504-a850-1eea1a9655f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.079673] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1358.079673] env[63371]: value = "task-1773651" [ 1358.079673] env[63371]: _type = "Task" [ 1358.079673] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.089260] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773651, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.252647] env[63371]: INFO nova.compute.resource_tracker [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating resource usage from migration e496466e-2a3b-442c-9adb-941ce7e06a5e [ 1358.437488] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526a93c4-2741-b52a-550b-3a9dff5e50ed, 'name': SearchDatastore_Task, 'duration_secs': 0.013653} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.438319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1358.438319] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1358.438319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1358.438319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1358.438581] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1358.439610] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d789e9c5-369d-44f4-a0cb-8cb1e6330371 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.453732] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1358.453823] env[63371]: DEBUG 
nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1358.454559] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02420b35-8f4d-45d1-8426-30fa1295828a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.462746] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1358.462746] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5266d001-9833-62e6-38ac-29137b426daa" [ 1358.462746] env[63371]: _type = "Task" [ 1358.462746] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.475377] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5266d001-9833-62e6-38ac-29137b426daa, 'name': SearchDatastore_Task, 'duration_secs': 0.010031} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.476194] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-758b31ca-8584-4b01-95f3-8a0e8631db93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.483110] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1358.483110] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52272e96-65dd-2946-759b-83e67f54882e" [ 1358.483110] env[63371]: _type = "Task" [ 1358.483110] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.491971] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52272e96-65dd-2946-759b-83e67f54882e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.593375] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773651, 'name': ReconfigVM_Task, 'duration_secs': 0.304821} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.595403] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 7841ebd2-0c23-4e32-8b81-42311a32c6fd/7841ebd2-0c23-4e32-8b81-42311a32c6fd.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1358.595868] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-315d7f15-589f-40f0-b3d4-74b6a61c2422 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.604921] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1358.604921] env[63371]: value = "task-1773652" [ 1358.604921] env[63371]: _type = "Task" [ 1358.604921] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1358.617511] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773652, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.878136] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb8848e-2114-4428-b3f0-bad29ba76088 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.889558] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7ffbff-d0e5-4225-bb48-0be97821a59f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.924738] env[63371]: DEBUG nova.network.neutron [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Successfully updated port: e2249de3-2c03-4371-aab4-6173dd2b5d56 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1358.926767] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0214f55b-909b-4680-b554-c0c2c5e58499 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.936433] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e472eb9-21dd-4c6d-9be1-f0f56a06c31f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.955013] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1359.000522] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52272e96-65dd-2946-759b-83e67f54882e, 'name': SearchDatastore_Task, 'duration_secs': 0.009675} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.000856] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1359.001127] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1359.001406] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-158150a2-fad0-48f1-a31c-077ad3503046 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.014836] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1359.014836] env[63371]: value = "task-1773653" [ 1359.014836] env[63371]: _type = "Task" [ 1359.014836] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.024726] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773653, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.119154] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773652, 'name': Rename_Task, 'duration_secs': 0.168114} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.119630] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1359.119992] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1f3dae5-d952-44d9-b41f-fe053ea690a2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.135196] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1359.135196] env[63371]: value = "task-1773654" [ 1359.135196] env[63371]: _type = "Task" [ 1359.135196] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.145360] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773654, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.201999] env[63371]: DEBUG nova.compute.manager [req-e20dff03-d1a3-4928-988f-dbe799d685fd req-29b82cd9-b677-4dc1-a3ba-f2a50fd8a4b6 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Received event network-vif-plugged-e2249de3-2c03-4371-aab4-6173dd2b5d56 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1359.207887] env[63371]: DEBUG oslo_concurrency.lockutils [req-e20dff03-d1a3-4928-988f-dbe799d685fd req-29b82cd9-b677-4dc1-a3ba-f2a50fd8a4b6 service nova] Acquiring lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.215161] env[63371]: DEBUG oslo_concurrency.lockutils [req-e20dff03-d1a3-4928-988f-dbe799d685fd req-29b82cd9-b677-4dc1-a3ba-f2a50fd8a4b6 service nova] Lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.215161] env[63371]: DEBUG oslo_concurrency.lockutils [req-e20dff03-d1a3-4928-988f-dbe799d685fd req-29b82cd9-b677-4dc1-a3ba-f2a50fd8a4b6 service nova] Lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.009s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.215161] env[63371]: DEBUG nova.compute.manager [req-e20dff03-d1a3-4928-988f-dbe799d685fd req-29b82cd9-b677-4dc1-a3ba-f2a50fd8a4b6 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] No waiting events found dispatching network-vif-plugged-e2249de3-2c03-4371-aab4-6173dd2b5d56 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1359.215161] env[63371]: WARNING 
nova.compute.manager [req-e20dff03-d1a3-4928-988f-dbe799d685fd req-29b82cd9-b677-4dc1-a3ba-f2a50fd8a4b6 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Received unexpected event network-vif-plugged-e2249de3-2c03-4371-aab4-6173dd2b5d56 for instance with vm_state building and task_state spawning. [ 1359.299107] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "362d8303-524a-457a-b8d9-2bad87fa816b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.299433] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "362d8303-524a-457a-b8d9-2bad87fa816b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.299433] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "362d8303-524a-457a-b8d9-2bad87fa816b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.300092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "362d8303-524a-457a-b8d9-2bad87fa816b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.300092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "362d8303-524a-457a-b8d9-2bad87fa816b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1359.309251] env[63371]: INFO nova.compute.manager [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Terminating instance [ 1359.313180] env[63371]: DEBUG nova.compute.manager [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1359.313180] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1359.315085] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e66c313-023a-4c77-9b9f-81400665f751 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.327797] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1359.333983] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4de479f3-6880-41f0-980d-50987f6fa86e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.342832] env[63371]: DEBUG oslo_vmware.api [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1359.342832] env[63371]: value = "task-1773655" [ 1359.342832] env[63371]: _type = "Task" [ 1359.342832] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.356492] env[63371]: DEBUG oslo_vmware.api [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773655, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.434049] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "refresh_cache-e4608e3c-7083-42fa-b88c-8ee007ef7f60" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1359.434049] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "refresh_cache-e4608e3c-7083-42fa-b88c-8ee007ef7f60" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.434049] env[63371]: DEBUG nova.network.neutron [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1359.497542] env[63371]: ERROR nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [req-1b4621ad-cc4e-47bb-93b5-dae0216c5ac0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1b4621ad-cc4e-47bb-93b5-dae0216c5ac0"}]} [ 1359.522234] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1359.530780] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773653, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501153} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.531080] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1359.531390] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1359.531586] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4281c19d-d536-4c29-8fc1-0a538edad943 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.543719] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1359.543719] env[63371]: value = "task-1773656" [ 1359.543719] env[63371]: _type = "Task" [ 1359.543719] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.554962] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773656, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.556296] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1359.556552] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1359.573504] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1359.587481] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "b5e259ea-d103-41c6-84b3-748813bb514d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.587858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "b5e259ea-d103-41c6-84b3-748813bb514d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.598691] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1359.648808] env[63371]: DEBUG 
oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773654, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.661052] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.661307] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.700590] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "36b81143-211f-4c77-854b-abe0d3f39ce4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1359.702647] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1359.855581] env[63371]: DEBUG oslo_vmware.api [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773655, 'name': PowerOffVM_Task, 'duration_secs': 0.267448} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.855878] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1359.856080] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1359.858984] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4ace4c8-5219-4a6d-b47b-3b4f7837d79d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.943935] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1359.943935] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1359.943935] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Deleting the datastore file [datastore1] 362d8303-524a-457a-b8d9-2bad87fa816b {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1359.946147] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c61fc91-7035-43e9-b51d-ec3bfebdf665 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.953522] env[63371]: DEBUG oslo_vmware.api [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for the task: (returnval){ [ 1359.953522] env[63371]: value = "task-1773658" [ 1359.953522] env[63371]: _type = "Task" [ 1359.953522] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.965901] env[63371]: DEBUG oslo_vmware.api [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773658, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.985148] env[63371]: DEBUG nova.network.neutron [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1360.055882] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773656, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081348} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.055882] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1360.056652] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1d2e8d-0478-41af-b4c1-0c7f468851a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.085709] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1360.089261] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-560ef428-cb06-4042-8e4b-687d74c71bd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.113589] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1360.113589] env[63371]: value = "task-1773659" [ 1360.113589] env[63371]: _type = "Task" [ 1360.113589] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.128761] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773659, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.142292] env[63371]: DEBUG oslo_vmware.api [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773654, 'name': PowerOnVM_Task, 'duration_secs': 0.642961} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.145274] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1360.145540] env[63371]: INFO nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Took 9.13 seconds to spawn the instance on the hypervisor. [ 1360.146312] env[63371]: DEBUG nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1360.146840] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8dd30a9-3d08-400d-bc74-e195b62a5157 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.151405] env[63371]: DEBUG nova.network.neutron [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Updating instance_info_cache with network_info: [{"id": "e2249de3-2c03-4371-aab4-6173dd2b5d56", "address": "fa:16:3e:4b:68:50", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2249de3-2c", "ovs_interfaceid": "e2249de3-2c03-4371-aab4-6173dd2b5d56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.230512] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26fca64f-eb7c-4d44-a154-2fd217a8ee36 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.244300] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86712cfc-0a95-4f13-adf5-351dcb1cbc8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.277491] env[63371]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca41cf6f-67e8-4849-be30-b58c10aaf2f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.285998] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9085e9fa-c241-4cf1-8f73-f92d99ffe6f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.300598] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1360.468559] env[63371]: DEBUG oslo_vmware.api [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Task: {'id': task-1773658, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133731} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.468559] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1360.468559] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1360.468559] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1360.468559] env[63371]: INFO nova.compute.manager [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1360.468877] env[63371]: DEBUG oslo.service.loopingcall [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1360.468877] env[63371]: DEBUG nova.compute.manager [-] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1360.468877] env[63371]: DEBUG nova.network.neutron [-] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1360.630614] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773659, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.658730] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "refresh_cache-e4608e3c-7083-42fa-b88c-8ee007ef7f60" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1360.659109] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance network_info: |[{"id": "e2249de3-2c03-4371-aab4-6173dd2b5d56", "address": "fa:16:3e:4b:68:50", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2249de3-2c", "ovs_interfaceid": "e2249de3-2c03-4371-aab4-6173dd2b5d56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1360.663607] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:68:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e2249de3-2c03-4371-aab4-6173dd2b5d56', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1360.672484] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating folder: Project (a4ca8a73414142d497ebd3d3f043d9ae). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1360.673840] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28b03ac2-5a09-4a50-a02f-8ea708bb16d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.676225] env[63371]: INFO nova.compute.manager [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Took 31.41 seconds to build instance. [ 1360.688707] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created folder: Project (a4ca8a73414142d497ebd3d3f043d9ae) in parent group-v368199. [ 1360.689109] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating folder: Instances. Parent ref: group-v368255. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1360.689545] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-460a48e3-eed9-4d9a-b22d-b047648ee019 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.701623] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created folder: Instances in parent group-v368255. [ 1360.702062] env[63371]: DEBUG oslo.service.loopingcall [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1360.702705] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1360.703361] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-556df988-3952-408a-a810-dacc7f9bad8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.728310] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1360.728310] env[63371]: value = "task-1773662" [ 1360.728310] env[63371]: _type = "Task" [ 1360.728310] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.738548] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773662, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.826628] env[63371]: ERROR nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [req-d26bc867-3985-49e5-8d08-ecbb48acf20c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d26bc867-3985-49e5-8d08-ecbb48acf20c"}]} [ 1360.851374] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1360.867763] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1360.867969] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1360.885202] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1360.907668] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 
tempest-MigrationsAdminTest-1001891710-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1361.126792] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773659, 'name': ReconfigVM_Task, 'duration_secs': 0.77588} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.127125] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658/1924d3d2-cc88-4fd2-b509-8463da796658.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1361.127758] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7c469bfd-b048-41a3-9c98-5a4c5ee01700 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.136089] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1361.136089] env[63371]: value = "task-1773663" [ 1361.136089] env[63371]: _type = "Task" [ 1361.136089] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.150119] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773663, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.171028] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.171319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.178467] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a06e7c59-d08b-462b-a887-ef624bc15741 tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.437s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1361.225929] env[63371]: DEBUG nova.network.neutron [-] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1361.242175] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773662, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.427381] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "50d5eac1-0752-4089-948c-b04439df6f6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1361.427489] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "50d5eac1-0752-4089-948c-b04439df6f6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1361.443928] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9453433d-1274-4af1-8378-186cc5a3023f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.454365] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c75719a-7033-4055-9298-b5c0eeb7210d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.489998] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc930b82-6930-461b-8437-7b462032bf94 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.499084] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80fd089d-5d69-48b0-9451-20b6bb18588c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.513372] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1361.647898] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773663, 'name': Rename_Task, 'duration_secs': 0.143294} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.648188] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1361.648462] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e18884c1-5eae-45d7-8a59-5ddae62cd112 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.657937] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1361.657937] env[63371]: value = "task-1773664" [ 1361.657937] env[63371]: _type = "Task" [ 1361.657937] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.669309] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773664, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.681023] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1361.728764] env[63371]: INFO nova.compute.manager [-] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Took 1.26 seconds to deallocate network for instance. [ 1361.746504] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773662, 'name': CreateVM_Task, 'duration_secs': 0.668668} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.746681] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1361.751477] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.751696] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.752256] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1361.752337] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b35ee775-5a4a-4685-9cba-353aaa877707 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.759443] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1361.759443] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526b26c1-6920-16e4-ce7d-0b35564614aa" [ 1361.759443] env[63371]: _type = "Task" [ 1361.759443] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.774190] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526b26c1-6920-16e4-ce7d-0b35564614aa, 'name': SearchDatastore_Task, 'duration_secs': 0.010237} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.774497] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1361.774743] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1361.774982] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.775151] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.775346] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1361.775622] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55ed11aa-a62a-4f5f-ae1e-2718763d46ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.789701] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1361.789888] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1361.790645] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8691d884-f807-49c2-bd10-ca6962f96106 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.797389] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1361.797389] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52605ebc-cf52-0a85-b4a5-9bbbc01e0493" [ 1361.797389] env[63371]: _type = "Task" [ 1361.797389] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.805807] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52605ebc-cf52-0a85-b4a5-9bbbc01e0493, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.885355] env[63371]: DEBUG nova.compute.manager [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Received event network-changed-e2249de3-2c03-4371-aab4-6173dd2b5d56 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1361.885470] env[63371]: DEBUG nova.compute.manager [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Refreshing instance network info cache due to event network-changed-e2249de3-2c03-4371-aab4-6173dd2b5d56. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1361.885691] env[63371]: DEBUG oslo_concurrency.lockutils [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] Acquiring lock "refresh_cache-e4608e3c-7083-42fa-b88c-8ee007ef7f60" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1361.886007] env[63371]: DEBUG oslo_concurrency.lockutils [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] Acquired lock "refresh_cache-e4608e3c-7083-42fa-b88c-8ee007ef7f60" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1361.887489] env[63371]: DEBUG nova.network.neutron [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Refreshing network info cache for port e2249de3-2c03-4371-aab4-6173dd2b5d56 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1362.034924] env[63371]: ERROR nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [req-c7922e61-1ea0-4b1c-b032-51d747506aca] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c7922e61-1ea0-4b1c-b032-51d747506aca"}]} [ 1362.052137] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1362.069125] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1362.069125] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1362.083799] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1362.104431] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1362.169733] env[63371]: DEBUG oslo_vmware.api [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773664, 'name': PowerOnVM_Task, 'duration_secs': 0.46225} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.170080] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1362.170237] env[63371]: DEBUG nova.compute.manager [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1362.171030] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a60a53-3068-4ff7-aa5a-8e9eb05f9554 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.204261] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.240738] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.251727] env[63371]: DEBUG nova.compute.manager [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Received event network-changed {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1362.252039] env[63371]: DEBUG nova.compute.manager [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Refreshing instance network info cache due to event network-changed. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1362.252256] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] Acquiring lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1362.252397] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] Acquired lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1362.252552] env[63371]: DEBUG nova.network.neutron [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1362.312458] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52605ebc-cf52-0a85-b4a5-9bbbc01e0493, 'name': SearchDatastore_Task, 'duration_secs': 0.009259} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.318312] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd46db18-3028-4aa7-ba96-05ef51923839 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.325419] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1362.325419] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bd722d-d187-2d96-1ea2-9e14b78edb08" [ 1362.325419] env[63371]: _type = "Task" [ 1362.325419] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.334711] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bd722d-d187-2d96-1ea2-9e14b78edb08, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1362.691847] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc62c889-9c6a-4d07-bace-662579bc5ca2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.698445] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1362.704937] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83376b1e-067f-425c-9d66-7195ee228a4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.739333] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0466e4-c035-4419-b545-409ceac92731 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.747974] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf61ff5-bbad-4223-874a-6f0ddfbad3a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.766165] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1362.835316] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bd722d-d187-2d96-1ea2-9e14b78edb08, 'name': SearchDatastore_Task, 'duration_secs': 0.011484} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1362.835586] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1362.835843] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1362.836118] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9f981aac-d3df-4c26-b185-33529a8abaa9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1362.844011] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1362.844011] env[63371]: value = "task-1773665" [ 1362.844011] env[63371]: _type = "Task" [ 1362.844011] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1362.854616] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773665, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.047875] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.048195] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.048494] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1363.048702] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.048873] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.051475] env[63371]: INFO nova.compute.manager [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Terminating instance [ 1363.053525] env[63371]: DEBUG nova.compute.manager [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1363.053776] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1363.054617] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f472da0-f138-4c45-b51f-366be758066a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.065601] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1363.065764] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-febecd38-ae5d-49ef-9d6b-8ad39ee279ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.074857] env[63371]: DEBUG oslo_vmware.api [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1363.074857] env[63371]: value = "task-1773666" [ 1363.074857] env[63371]: _type = "Task" [ 1363.074857] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.090030] env[63371]: DEBUG oslo_vmware.api [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773666, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.240394] env[63371]: DEBUG nova.network.neutron [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Updated VIF entry in instance network info cache for port e2249de3-2c03-4371-aab4-6173dd2b5d56. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1363.241198] env[63371]: DEBUG nova.network.neutron [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Updating instance_info_cache with network_info: [{"id": "e2249de3-2c03-4371-aab4-6173dd2b5d56", "address": "fa:16:3e:4b:68:50", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape2249de3-2c", "ovs_interfaceid": "e2249de3-2c03-4371-aab4-6173dd2b5d56", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.315216] env[63371]: DEBUG nova.scheduler.client.report [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 49 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1363.315216] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 49 to 50 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1363.315216] env[63371]: DEBUG nova.compute.provider_tree [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
1363.360036] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773665, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.438600] env[63371]: DEBUG nova.network.neutron [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Updating instance_info_cache with network_info: [{"id": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "address": "fa:16:3e:19:9f:ce", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.15", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4493eb7b-33", "ovs_interfaceid": "4493eb7b-33d3-4a78-a1dd-3a96c6144850", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1363.587709] env[63371]: DEBUG oslo_vmware.api [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773666, 'name': PowerOffVM_Task, 'duration_secs': 0.347384} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.588073] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1363.588292] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1363.588639] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-008d6851-ac16-4d28-91fb-34778699c39e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.684141] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1363.684473] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1363.684711] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Deleting the datastore file [datastore1] 7841ebd2-0c23-4e32-8b81-42311a32c6fd {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1363.685080] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20a03c54-2fbc-4d4f-8454-107f80881184 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.694558] env[63371]: DEBUG oslo_vmware.api [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for the task: (returnval){ [ 1363.694558] env[63371]: value = "task-1773668" [ 1363.694558] env[63371]: _type = "Task" [ 1363.694558] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.704833] env[63371]: DEBUG oslo_vmware.api [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773668, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.746725] env[63371]: DEBUG oslo_concurrency.lockutils [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] Releasing lock "refresh_cache-e4608e3c-7083-42fa-b88c-8ee007ef7f60" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1363.747868] env[63371]: DEBUG nova.compute.manager [req-d68949ed-e2f1-4b1f-857e-1dc2c0a4911c req-c55d5062-0896-4f5c-88b1-5d5a951fe5c8 service nova] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Received event network-vif-deleted-54ac14c5-812a-455e-88ff-92040c426688 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1363.821998] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 6.589s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1363.822244] env[63371]: INFO nova.compute.manager [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Migrating [ 1363.822507] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1363.822648] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "compute-rpcapi-router" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1363.823977] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.048s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1363.827331] env[63371]: INFO nova.compute.claims [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1363.833019] env[63371]: INFO nova.compute.rpcapi [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 1363.833651] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "compute-rpcapi-router" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 
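The "Acquiring lock ... / Lock ... acquired ... waited N s / Lock ... released ... held N s" triplets running through this trace are emitted by oslo.concurrency's lockutils wrapper around the decorated compute-manager methods. A minimal sketch of that pattern follows, assuming only that oslo.concurrency is installed; the lock name and function below are illustrative placeholders, not identifiers taken from this log:

    from oslo_concurrency import lockutils

    # Placeholder lock name; callers decorated with the same name serialize
    # against each other within this process (external=False is the default).
    @lockutils.synchronized('example-resource-lock')
    def update_example_resource():
        # Runs with the named lock held; lockutils' inner wrapper is what logs
        # the "acquired ... waited" and "released ... held" timings seen above.
        pass

    update_example_resource()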
1363.870269] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558896} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1363.870617] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1363.870936] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1363.871306] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47ffbb41-002b-4e00-9964-81257552636c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.882309] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1363.882309] env[63371]: value = "task-1773669" [ 1363.882309] env[63371]: _type = "Task" [ 1363.882309] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.893311] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773669, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1363.943824] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0e02847d-182a-4126-8d0f-7c5b37a5a0ec tempest-ServerExternalEventsTest-83247919 tempest-ServerExternalEventsTest-83247919-project] Releasing lock "refresh_cache-7841ebd2-0c23-4e32-8b81-42311a32c6fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1364.038146] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "1924d3d2-cc88-4fd2-b509-8463da796658" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.038146] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "1924d3d2-cc88-4fd2-b509-8463da796658" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.038146] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "1924d3d2-cc88-4fd2-b509-8463da796658-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1364.038146] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "1924d3d2-cc88-4fd2-b509-8463da796658-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1364.038306] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "1924d3d2-cc88-4fd2-b509-8463da796658-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1364.039759] env[63371]: INFO nova.compute.manager [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Terminating instance [ 1364.044123] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "refresh_cache-1924d3d2-cc88-4fd2-b509-8463da796658" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.044123] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 
tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "refresh_cache-1924d3d2-cc88-4fd2-b509-8463da796658" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.044123] env[63371]: DEBUG nova.network.neutron [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1364.211530] env[63371]: DEBUG oslo_vmware.api [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Task: {'id': task-1773668, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.182637} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.214616] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1364.214616] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1364.214616] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1364.214616] env[63371]: INFO nova.compute.manager [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1364.214616] env[63371]: DEBUG oslo.service.loopingcall [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1364.214783] env[63371]: DEBUG nova.compute.manager [-] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1364.214783] env[63371]: DEBUG nova.network.neutron [-] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1364.362167] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1364.362460] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.362689] env[63371]: DEBUG nova.network.neutron [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1364.393884] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773669, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072954} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.394193] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1364.395079] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd9ab860-1a28-4bf2-820f-b1b0ce072a20 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.422545] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1364.422659] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf358dfe-7a1a-4bd4-9df7-f1e81da662cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.449999] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1364.449999] env[63371]: value = "task-1773670" [ 1364.449999] env[63371]: _type = "Task" [ 1364.449999] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.459324] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773670, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.580280] env[63371]: DEBUG nova.network.neutron [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1364.694993] env[63371]: DEBUG nova.network.neutron [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.930299] env[63371]: DEBUG nova.compute.manager [req-f8737874-7ea4-432f-b8d5-b9e2052f1696 req-1f941a82-665a-4ef7-af40-1da8d84eebe0 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Received event network-vif-deleted-4493eb7b-33d3-4a78-a1dd-3a96c6144850 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1364.930561] env[63371]: INFO nova.compute.manager [req-f8737874-7ea4-432f-b8d5-b9e2052f1696 req-1f941a82-665a-4ef7-af40-1da8d84eebe0 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Neutron deleted interface 4493eb7b-33d3-4a78-a1dd-3a96c6144850; detaching it from the instance and deleting it from the info cache [ 1364.930748] env[63371]: DEBUG nova.network.neutron [req-f8737874-7ea4-432f-b8d5-b9e2052f1696 req-1f941a82-665a-4ef7-af40-1da8d84eebe0 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.963034] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773670, 'name': ReconfigVM_Task, 'duration_secs': 0.325105} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.965772] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Reconfigured VM instance instance-00000013 to attach disk [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1364.967096] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b19c2df-ea8b-4a34-8c69-a69c4e938642 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.976444] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1364.976444] env[63371]: value = "task-1773671" [ 1364.976444] env[63371]: _type = "Task" [ 1364.976444] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.995351] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773671, 'name': Rename_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.153791] env[63371]: DEBUG nova.network.neutron [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance_info_cache with network_info: [{"id": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "address": "fa:16:3e:39:29:ca", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eac2e62-a1", "ovs_interfaceid": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.197217] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "refresh_cache-1924d3d2-cc88-4fd2-b509-8463da796658" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.197347] env[63371]: DEBUG nova.compute.manager [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1365.197554] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1365.198661] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036da373-6de8-4c51-8f7d-32aacec2b415 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.208579] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1365.208988] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba08c0cf-8873-403e-b9a5-e76efc7003da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.221615] env[63371]: DEBUG nova.network.neutron [-] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1365.223483] env[63371]: DEBUG oslo_vmware.api [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1365.223483] env[63371]: value = "task-1773672" [ 1365.223483] env[63371]: _type = "Task" [ 1365.223483] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.232536] env[63371]: DEBUG oslo_vmware.api [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773672, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.404872] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a36987c-c18f-4d8f-9759-092bef8ad55c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.413491] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34f97bc6-359a-4759-aac0-bbadd4f7ee6e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.444211] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-861dc69b-5481-4a0d-8e0a-aef778aef43b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.446513] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238aa8b4-fe9d-4d6f-a88b-d4724df6aa8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.456301] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685051be-6146-4916-a915-872a670907eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.462639] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6b1a47-1dc8-4d7a-8870-967c4ad1dcb3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.484949] env[63371]: DEBUG nova.compute.provider_tree [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1365.504585] env[63371]: DEBUG nova.compute.manager [req-f8737874-7ea4-432f-b8d5-b9e2052f1696 req-1f941a82-665a-4ef7-af40-1da8d84eebe0 service nova] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Detach interface failed, port_id=4493eb7b-33d3-4a78-a1dd-3a96c6144850, reason: Instance 7841ebd2-0c23-4e32-8b81-42311a32c6fd could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1365.514415] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773671, 'name': Rename_Task, 'duration_secs': 0.156152} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.514970] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1365.515246] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b1de598-e5de-41ee-9589-d36ad90e7836 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.522371] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1365.522371] env[63371]: value = "task-1773673" [ 1365.522371] env[63371]: _type = "Task" [ 1365.522371] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.532139] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773673, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1365.547687] env[63371]: DEBUG nova.scheduler.client.report [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 50 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1365.547955] env[63371]: DEBUG nova.compute.provider_tree [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 50 to 51 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1365.548152] env[63371]: DEBUG nova.compute.provider_tree [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 
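The inventory update traced just above is, at the HTTP level, a PUT to the Placement API's resource-provider inventories endpoint. A rough sketch of an equivalent direct call follows, assuming the requests library is available; the endpoint URL and token are placeholders, the figures simply mirror the VCPU/MEMORY_MB/DISK_GB data shown in the log, and Nova itself performs this through its scheduler report client rather than raw HTTP:

    import requests

    PLACEMENT = 'http://placement.example.org'            # placeholder endpoint
    TOKEN = 'placeholder-keystone-token'                   # placeholder auth token
    PROVIDER = 'c079ebb1-2fa2-4df9-bdab-118e305653c1'      # provider UUID from the log

    payload = {
        # Generation the caller last saw for this provider; Placement rejects the
        # update with 409 Conflict if it no longer matches, and bumps it on
        # success (the log shows it moving from 50 to 51 around this update).
        'resource_provider_generation': 50,
        'inventories': {
            'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                     'step_size': 1, 'allocation_ratio': 4.0},
            'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                          'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
            'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164,
                        'step_size': 1, 'allocation_ratio': 1.0},
        },
    }

    resp = requests.put(f'{PLACEMENT}/resource_providers/{PROVIDER}/inventories',
                        json=payload, headers={'X-Auth-Token': TOKEN})
    resp.raise_for_status()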
1365.657108] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1365.725840] env[63371]: INFO nova.compute.manager [-] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Took 1.51 seconds to deallocate network for instance. [ 1365.741749] env[63371]: DEBUG oslo_vmware.api [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773672, 'name': PowerOffVM_Task, 'duration_secs': 0.199142} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1365.741981] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1365.742171] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1365.742446] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f984fff-fcf7-4e85-accb-e587a9830ead {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.773773] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1365.774062] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1365.774220] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleting the datastore file [datastore1] 1924d3d2-cc88-4fd2-b509-8463da796658 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1365.774480] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72839552-6eb1-4706-bd9b-fbc4d1a974bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1365.782034] env[63371]: DEBUG oslo_vmware.api [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1365.782034] env[63371]: value = "task-1773675" [ 1365.782034] 
env[63371]: _type = "Task" [ 1365.782034] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1365.795330] env[63371]: DEBUG oslo_vmware.api [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773675, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.035058] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773673, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1366.060464] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.234s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1366.061122] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1366.065189] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.970s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1366.065556] env[63371]: DEBUG nova.objects.instance [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lazy-loading 'resources' on Instance uuid d9523239-79d1-434f-977a-e1f0e358c82b {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1366.236846] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1366.297899] env[63371]: DEBUG oslo_vmware.api [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091378} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.298376] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1366.298418] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1366.298575] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1366.298746] env[63371]: INFO nova.compute.manager [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1366.298995] env[63371]: DEBUG oslo.service.loopingcall [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1366.299209] env[63371]: DEBUG nova.compute.manager [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1366.299309] env[63371]: DEBUG nova.network.neutron [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1366.317877] env[63371]: DEBUG nova.network.neutron [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1366.538581] env[63371]: DEBUG oslo_vmware.api [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773673, 'name': PowerOnVM_Task, 'duration_secs': 0.540835} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1366.539262] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1366.539739] env[63371]: INFO nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Took 9.40 seconds to spawn the instance on the hypervisor. [ 1366.540304] env[63371]: DEBUG nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1366.542302] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9cf06b6-6375-4185-9c36-0907ad87dd44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.567924] env[63371]: DEBUG nova.compute.utils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1366.569456] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1366.572857] env[63371]: DEBUG nova.network.neutron [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1366.716963] env[63371]: DEBUG nova.policy [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01c1437e43364f0ba8db6677fe2ed978', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3fa37041acf4211987c97c105c47cf0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1366.820301] env[63371]: DEBUG nova.network.neutron [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1367.068434] env[63371]: INFO nova.compute.manager [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Took 33.22 seconds to build instance. [ 1367.078725] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1367.177916] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7264c6-e9a1-4cc7-b98e-0bec65df7c2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.209722] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 0 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1367.216848] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b7e41d2-3323-4dc9-b4ba-e92d228a82f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.225293] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6f85d1-d4b2-48c7-b2a0-2b56a5a38345 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.259560] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fc63d0-c4a0-4a7a-bafc-b8b7e756d2dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.268587] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb6e7b0-d0df-4b5e-b405-045319455f3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.286543] env[63371]: DEBUG nova.compute.provider_tree [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1367.324933] env[63371]: INFO nova.compute.manager [-] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Took 1.03 seconds to deallocate network for instance. 
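The recurring "Waiting for the task: (returnval){ ... } / Task: {...} progress is N% / ... completed successfully" sequences in this trace come from oslo.vmware's task-polling loop around asynchronous vSphere calls such as PowerOffVM_Task and ReconfigVM_Task. A minimal sketch of that call pattern follows, assuming an oslo.vmware install; the vCenter host, credentials and poll interval are placeholders, and vm_ref stands in for a VirtualMachine managed-object reference obtained elsewhere:

    from oslo_vmware import api

    def power_off_and_wait(vm_ref):
        """Power off one VM and block until vCenter reports the task finished."""
        session = api.VMwareAPISession(
            host='vcenter.example.org',      # placeholder vCenter endpoint
            server_username='nova-user',     # placeholder credentials
            server_password='secret',
            api_retry_count=10,
            task_poll_interval=0.5)          # cadence of the "_poll_task" lines
        # The *_Task vSphere methods return a Task managed-object reference at once...
        task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # ...and wait_for_task polls it, logging progress, until success or error.
        session.wait_for_task(task_ref)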
[ 1367.369472] env[63371]: DEBUG nova.network.neutron [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Successfully created port: fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1367.571072] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5803eab5-b8cb-4916-8775-36c25bdde2c1 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.814s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1367.724256] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1367.724256] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80ac13b8-48b3-48c5-addc-81a4e51f354c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.735266] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1367.735266] env[63371]: value = "task-1773676" [ 1367.735266] env[63371]: _type = "Task" [ 1367.735266] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.747557] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773676, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.795109] env[63371]: DEBUG nova.scheduler.client.report [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1367.800025] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1367.800579] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1367.834942] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.077910] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1368.091247] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1368.117181] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1368.119090] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1368.119090] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1368.119090] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1368.119090] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1368.119090] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1368.119294] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1368.119294] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1368.119294] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1368.119294] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1368.119294] env[63371]: DEBUG nova.virt.hardware [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1368.119934] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501df96f-dd66-411b-ad9d-bbf386fd3afe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.128894] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327e6ac5-434c-432f-a42e-a8d7b652e2d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.246782] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773676, 'name': PowerOffVM_Task, 'duration_secs': 0.23979} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.247071] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1368.247259] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 17 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1368.302013] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.237s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1368.304341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.682s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1368.305789] env[63371]: INFO nova.compute.claims [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1368.336335] env[63371]: INFO nova.scheduler.client.report [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted allocations for instance d9523239-79d1-434f-977a-e1f0e358c82b [ 1368.601288] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1368.753620] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow 
threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1368.754032] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1368.754032] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1368.754207] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1368.754345] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1368.754485] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1368.754688] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1368.754842] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1368.755008] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1368.757595] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1368.757754] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1368.763136] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-341fc6a6-4255-4fd2-9d63-c14226a99ace {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1368.780799] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1368.780799] env[63371]: value = "task-1773677" [ 1368.780799] env[63371]: _type = "Task" [ 1368.780799] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1368.789748] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773677, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.844534] env[63371]: DEBUG oslo_concurrency.lockutils [None req-984d4ffb-ce23-4668-9c08-394b9a9d2bc6 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "d9523239-79d1-434f-977a-e1f0e358c82b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.536s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.044891] env[63371]: DEBUG nova.compute.manager [req-e40496fd-6cf5-42aa-9c5a-5257150cc936 req-c0438f73-2d68-4040-bd75-901c1c37a072 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-vif-plugged-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1369.045138] env[63371]: DEBUG oslo_concurrency.lockutils [req-e40496fd-6cf5-42aa-9c5a-5257150cc936 req-c0438f73-2d68-4040-bd75-901c1c37a072 service nova] Acquiring lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1369.045336] env[63371]: DEBUG oslo_concurrency.lockutils [req-e40496fd-6cf5-42aa-9c5a-5257150cc936 req-c0438f73-2d68-4040-bd75-901c1c37a072 service nova] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1369.045504] env[63371]: DEBUG oslo_concurrency.lockutils [req-e40496fd-6cf5-42aa-9c5a-5257150cc936 req-c0438f73-2d68-4040-bd75-901c1c37a072 service nova] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1369.045664] env[63371]: DEBUG nova.compute.manager [req-e40496fd-6cf5-42aa-9c5a-5257150cc936 req-c0438f73-2d68-4040-bd75-901c1c37a072 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] No waiting events found dispatching network-vif-plugged-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1369.045839] env[63371]: WARNING nova.compute.manager [req-e40496fd-6cf5-42aa-9c5a-5257150cc936 req-c0438f73-2d68-4040-bd75-901c1c37a072 service nova] 
[instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received unexpected event network-vif-plugged-fcd67cd5-500d-457a-9bbb-655583d97dd2 for instance with vm_state building and task_state spawning. [ 1369.081863] env[63371]: DEBUG nova.network.neutron [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Successfully updated port: fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1369.291230] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773677, 'name': ReconfigVM_Task, 'duration_secs': 0.216396} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1369.291632] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 33 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1369.583617] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1369.583990] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.584282] env[63371]: DEBUG nova.network.neutron [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1369.798053] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1369.798343] env[63371]: DEBUG nova.virt.hardware [None 
req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1369.799018] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1369.799712] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1369.799712] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1369.799712] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1369.799815] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1369.800354] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1369.800354] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1369.800354] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1369.800467] env[63371]: DEBUG nova.virt.hardware [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1369.808491] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Reconfiguring VM instance instance-00000009 to detach disk 
2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1369.811282] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-443f8ccc-96df-48b5-a62d-c98f5f6c09b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.831137] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1369.831137] env[63371]: value = "task-1773678" [ 1369.831137] env[63371]: _type = "Task" [ 1369.831137] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1369.842838] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773678, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1369.886054] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615777db-f3cc-4cce-ba72-be08234ed74f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.898712] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d89aedbe-ba54-4026-845e-5092a4370822 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.932704] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99abe26b-948d-4cfb-8d29-93efe6cdf822 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.941577] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7fffcce-0459-48d9-a17a-c05a4cc012c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1369.955542] env[63371]: DEBUG nova.compute.provider_tree [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1370.006209] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "64fc862c-a755-4cac-997b-7a8328638269" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.006396] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "64fc862c-a755-4cac-997b-7a8328638269" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.038698] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "dcf8063b-56eb-439c-bee5-139a1e157714" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.038945] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "dcf8063b-56eb-439c-bee5-139a1e157714" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.130673] env[63371]: DEBUG nova.network.neutron [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1370.309343] env[63371]: DEBUG nova.network.neutron [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.341768] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773678, 'name': ReconfigVM_Task, 'duration_secs': 0.152762} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.342060] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Reconfigured VM instance instance-00000009 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1370.342842] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589fdc54-772f-4624-b8f2-c6207344a46d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.365546] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed/f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1370.365848] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a6e7438-8e14-4fa4-abba-0db4f9ac1e13 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.386017] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1370.386017] env[63371]: value = "task-1773679" [ 1370.386017] env[63371]: _type = "Task" [ 1370.386017] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.393336] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773679, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.461613] env[63371]: DEBUG nova.scheduler.client.report [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1370.770033] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1370.770402] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.815019] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1370.815019] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Instance network_info: |[{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1370.815438] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:92:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca99f7a1-6365-4d3c-af16-1b1c1288091e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fcd67cd5-500d-457a-9bbb-655583d97dd2', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1370.821025] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Creating folder: Project (b3fa37041acf4211987c97c105c47cf0). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1370.821433] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84786e61-a8e9-4b18-ba8a-85aac3a8946a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.835013] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Created folder: Project (b3fa37041acf4211987c97c105c47cf0) in parent group-v368199. [ 1370.835013] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Creating folder: Instances. Parent ref: group-v368258. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1370.835013] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7dd536d-3151-45de-a8a9-d3d1f08b4877 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.844129] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Created folder: Instances in parent group-v368258. [ 1370.844129] env[63371]: DEBUG oslo.service.loopingcall [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1370.844129] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1370.844129] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d82d5aa9-7e31-4206-bd35-c7a82de78582 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.862033] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1370.862033] env[63371]: value = "task-1773682" [ 1370.862033] env[63371]: _type = "Task" [ 1370.862033] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.869580] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773682, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.893745] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773679, 'name': ReconfigVM_Task, 'duration_secs': 0.24862} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.897018] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Reconfigured VM instance instance-00000009 to attach disk [datastore1] f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed/f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1370.897018] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 50 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1370.968797] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.662s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1370.968797] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1370.970181] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.258s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1370.972694] env[63371]: INFO nova.compute.claims [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1371.080315] env[63371]: DEBUG nova.compute.manager [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1371.080695] env[63371]: DEBUG nova.compute.manager [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing instance network info cache due to event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1371.081060] env[63371]: DEBUG oslo_concurrency.lockutils [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] Acquiring lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.081332] env[63371]: DEBUG oslo_concurrency.lockutils [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] Acquired lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.081614] env[63371]: DEBUG nova.network.neutron [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1371.372149] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773682, 'name': CreateVM_Task, 'duration_secs': 0.330816} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.372343] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1371.373056] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.373227] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.373810] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1371.374083] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d4966ec-0420-45cc-9600-d9319a678a8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.378463] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1371.378463] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b37cf2-88c4-83e5-ce7c-5b1965a698bb" [ 1371.378463] env[63371]: _type = "Task" [ 1371.378463] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.386642] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b37cf2-88c4-83e5-ce7c-5b1965a698bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.401018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ff26ae-1b58-4cd8-8a41-4361f1f29500 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.420223] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15c989e-8f0c-42c5-a377-cfffda9f1ea7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.437305] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 67 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1371.471509] env[63371]: DEBUG nova.compute.utils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1371.473391] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1371.473565] env[63371]: DEBUG nova.network.neutron [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1371.537593] env[63371]: DEBUG nova.policy [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38c65e6dd9e4468fb1a0235bac086151', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4ca8a73414142d497ebd3d3f043d9ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1371.899152] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b37cf2-88c4-83e5-ce7c-5b1965a698bb, 'name': SearchDatastore_Task, 'duration_secs': 0.008997} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.899152] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1371.899152] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1371.899152] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1371.899491] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1371.899491] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1371.899491] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87685007-d58b-4a1a-bbc7-8a92e752f442 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.908609] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1371.908862] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1371.909916] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b1e574d-c72d-48a6-aff6-137f739accc1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1371.918038] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1371.918038] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52950c01-82a6-4f33-8edc-5259359f24a4" [ 1371.918038] env[63371]: _type = "Task" [ 1371.918038] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1371.929529] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52950c01-82a6-4f33-8edc-5259359f24a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.976384] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1372.042197] env[63371]: DEBUG nova.network.neutron [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updated VIF entry in instance network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1372.042414] env[63371]: DEBUG nova.network.neutron [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1372.044262] env[63371]: DEBUG nova.network.neutron [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Successfully created port: 225db88c-9e6c-40e6-a30e-a3830f2c411c {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1372.046470] env[63371]: DEBUG nova.network.neutron [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Port 3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1372.431057] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52950c01-82a6-4f33-8edc-5259359f24a4, 'name': SearchDatastore_Task, 'duration_secs': 0.009625} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.431969] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df9dfd45-1b8e-4354-856b-5e3f2e2f101c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.438898] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1372.438898] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fa65b7-e39b-b40f-7d4e-6265f71eb5c7" [ 1372.438898] env[63371]: _type = "Task" [ 1372.438898] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.446404] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fa65b7-e39b-b40f-7d4e-6265f71eb5c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1372.459553] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11506186-d691-48ca-8548-184b163ea44d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.465617] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95705997-4ac8-44dc-8e32-bbcdb5b859ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.499025] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c61e303-d604-4f21-b459-e768b97da9bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.505588] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00fd62ba-260a-4885-bb4a-ea88e857f03f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.520524] env[63371]: DEBUG nova.compute.provider_tree [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1372.547110] env[63371]: DEBUG oslo_concurrency.lockutils [req-ce337424-565c-4c97-9a7c-f85b2767941f req-f970401f-4664-48e0-beda-60627fb630b3 service nova] Releasing lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.950613] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fa65b7-e39b-b40f-7d4e-6265f71eb5c7, 'name': SearchDatastore_Task, 'duration_secs': 0.008992} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1372.950890] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1372.951160] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e0369f27-68ea-49c4-8524-3dbbb3cde96e/e0369f27-68ea-49c4-8524-3dbbb3cde96e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1372.951427] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0732151-86ae-48ce-9d1f-ac62acff60bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.957893] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1372.957893] env[63371]: value = "task-1773683" [ 1372.957893] env[63371]: _type = "Task" [ 1372.957893] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1372.971882] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773683, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.003325] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1373.034679] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1373.034954] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1373.035135] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1373.035338] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1373.035514] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1373.035640] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1373.035863] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1373.036043] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1373.036227] env[63371]: DEBUG nova.virt.hardware [None 
req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1373.036402] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1373.036587] env[63371]: DEBUG nova.virt.hardware [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1373.037508] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8346b993-3528-44d5-9f8f-fa133d2ddd59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.045458] env[63371]: ERROR nova.scheduler.client.report [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [req-f1ed4243-b73c-430a-8cfe-40ff7e2632b4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f1ed4243-b73c-430a-8cfe-40ff7e2632b4"}]} [ 1373.046771] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255ecb81-47dd-46a7-a7a4-3ff1ef8038f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.071200] env[63371]: DEBUG nova.scheduler.client.report [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1373.079505] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.079746] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.079919] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.095796] env[63371]: DEBUG nova.scheduler.client.report [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1373.096048] env[63371]: DEBUG nova.compute.provider_tree [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1373.107839] env[63371]: DEBUG nova.scheduler.client.report [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1373.126986] env[63371]: DEBUG nova.scheduler.client.report [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1373.469687] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456135} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.469890] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e0369f27-68ea-49c4-8524-3dbbb3cde96e/e0369f27-68ea-49c4-8524-3dbbb3cde96e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1373.470119] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1373.470380] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-69a96564-3125-4c6c-9794-152d29bdc011 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.476536] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1373.476536] env[63371]: value = "task-1773684" [ 1373.476536] env[63371]: _type = "Task" [ 1373.476536] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1373.488737] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773684, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1373.499266] env[63371]: DEBUG nova.compute.manager [req-4187124d-636b-4183-841d-2fd3ee69387b req-5fe4c7c1-c0be-435c-b736-e1fc2ed4a8b5 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Received event network-vif-plugged-225db88c-9e6c-40e6-a30e-a3830f2c411c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1373.499510] env[63371]: DEBUG oslo_concurrency.lockutils [req-4187124d-636b-4183-841d-2fd3ee69387b req-5fe4c7c1-c0be-435c-b736-e1fc2ed4a8b5 service nova] Acquiring lock "e912c210-3ae1-47ce-b9cd-afebf6195606-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1373.499738] env[63371]: DEBUG oslo_concurrency.lockutils [req-4187124d-636b-4183-841d-2fd3ee69387b req-5fe4c7c1-c0be-435c-b736-e1fc2ed4a8b5 service nova] Lock "e912c210-3ae1-47ce-b9cd-afebf6195606-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1373.499916] env[63371]: DEBUG oslo_concurrency.lockutils [req-4187124d-636b-4183-841d-2fd3ee69387b req-5fe4c7c1-c0be-435c-b736-e1fc2ed4a8b5 service nova] Lock "e912c210-3ae1-47ce-b9cd-afebf6195606-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1373.500113] env[63371]: DEBUG nova.compute.manager [req-4187124d-636b-4183-841d-2fd3ee69387b req-5fe4c7c1-c0be-435c-b736-e1fc2ed4a8b5 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] No waiting events found dispatching network-vif-plugged-225db88c-9e6c-40e6-a30e-a3830f2c411c {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1373.500300] env[63371]: WARNING nova.compute.manager [req-4187124d-636b-4183-841d-2fd3ee69387b req-5fe4c7c1-c0be-435c-b736-e1fc2ed4a8b5 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Received unexpected event network-vif-plugged-225db88c-9e6c-40e6-a30e-a3830f2c411c for instance with vm_state building and task_state spawning. 
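Editor's sketch: the 409 "placement.concurrent_update" error above, together with the inventory refresh and the later provider-generation bump from 55 to 56, is Placement's optimistic-concurrency handshake: every inventory PUT carries the provider generation the client last read, and a stale generation is rejected so the client must re-read and retry. Below is a minimal illustration of that handshake against the Placement HTTP API; the placement_url and token are assumed to be available already (keystone auth omitted), and plain requests is used instead of nova's scheduler report client.

    import requests

    def set_inventory(placement_url, token, rp_uuid, inventories, retries=3):
        # Placeholders: placement_url and token are assumed inputs; the
        # microversion header value is illustrative.
        headers = {'X-Auth-Token': token,
                   'OpenStack-API-Version': 'placement 1.26'}
        rp_url = '%s/resource_providers/%s' % (placement_url.rstrip('/'), rp_uuid)
        for _ in range(retries):
            # Re-read the provider to learn its current generation.
            generation = requests.get(rp_url, headers=headers).json()['generation']
            body = {'resource_provider_generation': generation,
                    'inventories': inventories}
            resp = requests.put(rp_url + '/inventories', json=body, headers=headers)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation in the meantime; loop and retry with the fresh value.
        raise RuntimeError('inventory update kept conflicting for %s' % rp_uuid)
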
[ 1373.646106] env[63371]: DEBUG nova.network.neutron [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Successfully updated port: 225db88c-9e6c-40e6-a30e-a3830f2c411c {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1373.664642] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ed039e-8acc-42ee-b4b0-e4f3fb173722 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.673708] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebcdb53c-6953-438c-ae63-fab78666600f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.708574] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf735c6-de57-48fb-b4a0-f5b6110424f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.718230] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42363273-3629-4faa-9eef-e5c80f392b41 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1373.732648] env[63371]: DEBUG nova.compute.provider_tree [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1373.988582] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773684, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.053846} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1373.988846] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1373.989683] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f153dc6b-98e4-4e89-9a19-a42bd395b599 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.014938] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] e0369f27-68ea-49c4-8524-3dbbb3cde96e/e0369f27-68ea-49c4-8524-3dbbb3cde96e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1374.015568] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02acb037-50cb-4040-b1b3-be9755729981 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.035891] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1374.035891] env[63371]: value = "task-1773685" [ 1374.035891] env[63371]: _type = "Task" [ 1374.035891] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.044328] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773685, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.141732] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.141974] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.142162] env[63371]: DEBUG nova.network.neutron [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1374.148287] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "refresh_cache-e912c210-3ae1-47ce-b9cd-afebf6195606" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1374.148418] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "refresh_cache-e912c210-3ae1-47ce-b9cd-afebf6195606" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1374.148553] env[63371]: DEBUG nova.network.neutron [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1374.266031] env[63371]: DEBUG nova.scheduler.client.report [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 55 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1374.266031] env[63371]: DEBUG nova.compute.provider_tree [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 55 to 56 during operation: update_inventory {{(pid=63371) _update_generation 
/opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1374.266031] env[63371]: DEBUG nova.compute.provider_tree [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1374.546640] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773685, 'name': ReconfigVM_Task, 'duration_secs': 0.285104} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1374.546921] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Reconfigured VM instance instance-00000014 to attach disk [datastore1] e0369f27-68ea-49c4-8524-3dbbb3cde96e/e0369f27-68ea-49c4-8524-3dbbb3cde96e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1374.547590] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de624123-bf3e-4460-b2e9-f8990944fdae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.553939] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1374.553939] env[63371]: value = "task-1773686" [ 1374.553939] env[63371]: _type = "Task" [ 1374.553939] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.562925] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773686, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1374.701818] env[63371]: DEBUG nova.network.neutron [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1374.770862] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.801s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1374.771387] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1374.774083] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.320s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1374.775468] env[63371]: INFO nova.compute.claims [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1374.985541] env[63371]: DEBUG nova.network.neutron [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Updating instance_info_cache with network_info: [{"id": "225db88c-9e6c-40e6-a30e-a3830f2c411c", "address": "fa:16:3e:77:6d:de", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap225db88c-9e", "ovs_interfaceid": "225db88c-9e6c-40e6-a30e-a3830f2c411c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.021052] env[63371]: DEBUG nova.network.neutron [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance_info_cache with network_info: [{"id": 
"3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "address": "fa:16:3e:39:29:ca", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eac2e62-a1", "ovs_interfaceid": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1375.064685] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773686, 'name': Rename_Task, 'duration_secs': 0.160065} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.064955] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1375.065223] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16b4e848-aa49-408f-a6a8-dcf25b060170 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.071447] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1375.071447] env[63371]: value = "task-1773687" [ 1375.071447] env[63371]: _type = "Task" [ 1375.071447] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.081341] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773687, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.280170] env[63371]: DEBUG nova.compute.utils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1375.283454] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1375.284882] env[63371]: DEBUG nova.network.neutron [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1375.332443] env[63371]: DEBUG nova.policy [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9adda22338e04c6da4b1d87790d42ebc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8126cc358323499680ab7423d7b6ce0d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1375.489295] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "refresh_cache-e912c210-3ae1-47ce-b9cd-afebf6195606" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.489295] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Instance network_info: |[{"id": "225db88c-9e6c-40e6-a30e-a3830f2c411c", "address": "fa:16:3e:77:6d:de", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap225db88c-9e", 
"ovs_interfaceid": "225db88c-9e6c-40e6-a30e-a3830f2c411c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1375.489592] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:6d:de', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '225db88c-9e6c-40e6-a30e-a3830f2c411c', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1375.498143] env[63371]: DEBUG oslo.service.loopingcall [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1375.498568] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1375.498846] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-996aa210-d2a2-4a40-96a0-8958dc3dff28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.519109] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1375.519109] env[63371]: value = "task-1773688" [ 1375.519109] env[63371]: _type = "Task" [ 1375.519109] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.523191] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1375.529818] env[63371]: DEBUG nova.compute.manager [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Received event network-changed-225db88c-9e6c-40e6-a30e-a3830f2c411c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1375.529818] env[63371]: DEBUG nova.compute.manager [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Refreshing instance network info cache due to event network-changed-225db88c-9e6c-40e6-a30e-a3830f2c411c. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1375.529818] env[63371]: DEBUG oslo_concurrency.lockutils [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] Acquiring lock "refresh_cache-e912c210-3ae1-47ce-b9cd-afebf6195606" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1375.529818] env[63371]: DEBUG oslo_concurrency.lockutils [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] Acquired lock "refresh_cache-e912c210-3ae1-47ce-b9cd-afebf6195606" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.529818] env[63371]: DEBUG nova.network.neutron [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Refreshing network info cache for port 225db88c-9e6c-40e6-a30e-a3830f2c411c {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1375.535773] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773688, 'name': CreateVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.585624] env[63371]: DEBUG oslo_vmware.api [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773687, 'name': PowerOnVM_Task, 'duration_secs': 0.409806} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.585893] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1375.586204] env[63371]: INFO nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Took 7.49 seconds to spawn the instance on the hypervisor. 
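Editor's sketch: the PowerOnVM_Task completed just above (and the SearchDatastore/CopyVirtualDisk/ReconfigVM tasks throughout this log) all follow oslo.vmware's invoke-then-poll pattern: the API call returns a vCenter task reference, and wait_for_task() is the poll loop that produces the "progress is N%" lines. A rough illustration follows, assuming an already-established VMwareAPISession named session (as created at the start of this log) and a vm_ref managed-object reference; this is not the exact nova.virt.vmwareapi code path.

    def power_on_vm(session, vm_ref):
        # Start the asynchronous power-on; vCenter returns a Task moref.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task (the "progress is 0% ... 100%" lines
        # above), raises on error states, and returns task info on success.
        return session.wait_for_task(task)
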
[ 1375.586410] env[63371]: DEBUG nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1375.587327] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea1b242-8bd8-4826-a803-31ed9cd090fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.639229] env[63371]: DEBUG nova.network.neutron [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Successfully created port: fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1375.784450] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1376.030941] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773688, 'name': CreateVM_Task, 'duration_secs': 0.358494} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.031214] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1376.031975] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.031975] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.032298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1376.032548] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a6c450a-210c-449e-9dbf-f95fac3bb573 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.045778] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 
tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1376.045778] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd4ee2-3f7e-7c63-7bae-f2f034a82406" [ 1376.045778] env[63371]: _type = "Task" [ 1376.045778] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.054758] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd4ee2-3f7e-7c63-7bae-f2f034a82406, 'name': SearchDatastore_Task, 'duration_secs': 0.009746} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.055059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.055432] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1376.055508] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1376.055650] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1376.055811] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1376.056068] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b749bdd-ecf1-414c-b4dc-65e9210cfacb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.059012] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e198222-2520-458c-8051-64405961b18c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.084143] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8117bad0-c5c5-49cc-a134-5ecf7df404a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.086515] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1376.086729] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1376.087731] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d6c25fc-df95-4784-becd-0e6609ca4134 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.095313] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1376.095313] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ddd1c8-5ac0-0935-c61e-fb330c939864" [ 1376.095313] env[63371]: _type = "Task" [ 1376.095313] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.098263] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 83 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1376.113185] env[63371]: INFO nova.compute.manager [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Took 39.37 seconds to build instance. [ 1376.118291] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ddd1c8-5ac0-0935-c61e-fb330c939864, 'name': SearchDatastore_Task, 'duration_secs': 0.008507} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.119089] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c8707a3-b0bf-45b4-a2e5-e619cd602245 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.124307] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1376.124307] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d1704e-9f10-5b35-ee3a-8c545bdec294" [ 1376.124307] env[63371]: _type = "Task" [ 1376.124307] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.139459] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d1704e-9f10-5b35-ee3a-8c545bdec294, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.324230] env[63371]: DEBUG nova.network.neutron [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Updated VIF entry in instance network info cache for port 225db88c-9e6c-40e6-a30e-a3830f2c411c. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1376.324410] env[63371]: DEBUG nova.network.neutron [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Updating instance_info_cache with network_info: [{"id": "225db88c-9e6c-40e6-a30e-a3830f2c411c", "address": "fa:16:3e:77:6d:de", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap225db88c-9e", "ovs_interfaceid": "225db88c-9e6c-40e6-a30e-a3830f2c411c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.391455] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8400418-8101-469c-8e46-e8a7f5d4a28b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.399355] env[63371]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64c9f14-c93d-41c5-b2a1-9fd0e652502a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.429607] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fb54c0-04e6-49a7-b9bd-4d8ebd68048e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.437088] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc255fd-5510-4344-bfab-e1bd8070555d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.450762] env[63371]: DEBUG nova.compute.provider_tree [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1376.613524] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1376.613879] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb7a82b2-d8aa-4ee2-9889-712ee32f32c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.616358] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5763b67b-eee8-49cd-bf34-db911a57f0e7 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.771s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1376.623549] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1376.623549] env[63371]: value = "task-1773689" [ 1376.623549] env[63371]: _type = "Task" [ 1376.623549] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.635891] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d1704e-9f10-5b35-ee3a-8c545bdec294, 'name': SearchDatastore_Task, 'duration_secs': 0.010266} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1376.639010] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.639274] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e912c210-3ae1-47ce-b9cd-afebf6195606/e912c210-3ae1-47ce-b9cd-afebf6195606.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1376.639569] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773689, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.639783] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7846f6e4-f035-4696-8474-4ccc592a7e51 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.646168] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1376.646168] env[63371]: value = "task-1773690" [ 1376.646168] env[63371]: _type = "Task" [ 1376.646168] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.656470] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773690, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1376.799565] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1376.824595] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1376.824863] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1376.825023] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1376.825200] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1376.825385] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1376.825563] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1376.825782] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1376.825996] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1376.826234] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1376.826433] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1376.826652] env[63371]: DEBUG nova.virt.hardware [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1376.827243] env[63371]: DEBUG oslo_concurrency.lockutils [req-a123a85b-2e4a-40b0-8986-eaa0feb4baaf req-0888d830-62a2-4923-b601-aeca42294178 service nova] Releasing lock "refresh_cache-e912c210-3ae1-47ce-b9cd-afebf6195606" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1376.829013] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99ea9020-0152-4696-a4fa-cd2becfef1f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.837200] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2c3904-7781-4a75-85cd-e19f7d95575c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.954033] env[63371]: DEBUG nova.scheduler.client.report [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1377.119457] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1377.141167] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773689, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.159514] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773690, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.248952] env[63371]: DEBUG nova.network.neutron [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Successfully updated port: fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1377.459770] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.686s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.460325] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1377.462915] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.050s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.464372] env[63371]: INFO nova.compute.claims [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1377.552914] env[63371]: DEBUG nova.compute.manager [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event network-vif-plugged-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1377.553173] env[63371]: DEBUG oslo_concurrency.lockutils [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] Acquiring lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.553390] env[63371]: DEBUG oslo_concurrency.lockutils [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.553564] env[63371]: DEBUG oslo_concurrency.lockutils [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1377.553739] env[63371]: DEBUG nova.compute.manager [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] No waiting events found dispatching network-vif-plugged-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1377.553960] env[63371]: WARNING nova.compute.manager [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received unexpected event network-vif-plugged-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de for instance with vm_state building and task_state spawning. [ 1377.554240] env[63371]: DEBUG nova.compute.manager [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1377.554429] env[63371]: DEBUG nova.compute.manager [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing instance network info cache due to event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1377.554632] env[63371]: DEBUG oslo_concurrency.lockutils [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.554768] env[63371]: DEBUG oslo_concurrency.lockutils [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1377.554920] env[63371]: DEBUG nova.network.neutron [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1377.644718] env[63371]: DEBUG oslo_vmware.api [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1773689, 'name': PowerOnVM_Task, 'duration_secs': 0.788538} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.644718] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1377.644895] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a34b83a7-49b3-4975-9abc-8b5131c2f2dd tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance 'f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed' progress to 100 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1377.653815] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.661882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "7e66011a-4fed-471f-82ea-e1016f92ad39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1377.662125] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1377.662333] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773690, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.515289} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.662547] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e912c210-3ae1-47ce-b9cd-afebf6195606/e912c210-3ae1-47ce-b9cd-afebf6195606.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1377.662747] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1377.662993] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-834c27e8-ca6c-40be-9a5d-b700d5c5d325 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.670489] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1377.670489] env[63371]: value = "task-1773691" [ 1377.670489] env[63371]: _type = "Task" [ 1377.670489] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.681255] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773691, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.750558] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1377.968401] env[63371]: DEBUG nova.compute.utils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1377.972197] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1377.972371] env[63371]: DEBUG nova.network.neutron [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1378.011337] env[63371]: DEBUG nova.policy [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e7a7343e28c34bdbb36d36ef413a1968', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f4bffbe32a94e19a1dc4562f925ca9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1378.108980] env[63371]: DEBUG nova.network.neutron [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1378.187012] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773691, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060223} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.187301] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1378.190135] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099d893d-193e-44bb-9999-93fb6d712939 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.215885] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] e912c210-3ae1-47ce-b9cd-afebf6195606/e912c210-3ae1-47ce-b9cd-afebf6195606.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1378.216105] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b20fc6c-9f4f-4ce7-958e-efab13813338 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.237520] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1378.237520] env[63371]: value = "task-1773692" [ 1378.237520] env[63371]: _type = "Task" [ 1378.237520] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.246323] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773692, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.277947] env[63371]: DEBUG nova.network.neutron [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1378.410523] env[63371]: DEBUG nova.network.neutron [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Successfully created port: 62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1378.479467] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1378.750125] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773692, 'name': ReconfigVM_Task, 'duration_secs': 0.392522} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.750125] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Reconfigured VM instance instance-00000015 to attach disk [datastore1] e912c210-3ae1-47ce-b9cd-afebf6195606/e912c210-3ae1-47ce-b9cd-afebf6195606.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1378.750459] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d93fe62-269d-4b02-a3cd-0289b1840186 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.760041] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1378.760041] env[63371]: value = "task-1773693" [ 1378.760041] env[63371]: _type = "Task" [ 1378.760041] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.768471] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773693, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.780473] env[63371]: DEBUG oslo_concurrency.lockutils [req-4af47dce-632a-4e66-a3ae-399bb2f23603 req-14e56311-a558-4417-95b9-46820aceabce service nova] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1378.784544] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1378.784544] env[63371]: DEBUG nova.network.neutron [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1379.031521] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1022f47-b004-4db4-a744-a7bfaf7a2e50 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.039412] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a1060f-6608-4b22-bcf8-dff5312f06b6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.069063] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae0cfc5-3277-483a-972b-5f38a52c1c12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.076017] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d24010-c99d-486d-8988-90d365f43c63 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.088810] env[63371]: DEBUG nova.compute.provider_tree [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1379.272240] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773693, 'name': Rename_Task, 'duration_secs': 0.186499} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.272696] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1379.273084] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e569e92-a954-4326-b6d9-d5b23daa656a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.281168] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1379.281168] env[63371]: value = "task-1773694" [ 1379.281168] env[63371]: _type = "Task" [ 1379.281168] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.295607] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773694, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.320969] env[63371]: DEBUG nova.network.neutron [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1379.461346] env[63371]: DEBUG nova.network.neutron [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1379.490250] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1379.526337] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1379.526337] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1379.526337] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1379.526580] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1379.526580] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1379.526580] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1379.526580] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1379.526580] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1379.526711] env[63371]: DEBUG 
nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1379.526711] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1379.526711] env[63371]: DEBUG nova.virt.hardware [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1379.527543] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98f3e7c-1273-4963-b98c-af3712cf92cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.535977] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d1f10a-753a-422a-9ff3-50aea1b0dbfc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.611473] env[63371]: ERROR nova.scheduler.client.report [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [req-160e8745-431f-4e3d-80c8-52e433163f60] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-160e8745-431f-4e3d-80c8-52e433163f60"}]} [ 1379.630469] env[63371]: DEBUG nova.scheduler.client.report [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1379.646083] env[63371]: DEBUG nova.scheduler.client.report [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1379.646532] env[63371]: DEBUG nova.compute.provider_tree [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1379.659187] env[63371]: DEBUG nova.scheduler.client.report [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1379.683790] env[63371]: DEBUG nova.scheduler.client.report [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1379.799467] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773694, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.956907] env[63371]: DEBUG nova.compute.manager [req-0def7ef1-8089-49df-8ffc-3639e1a6e862 req-e8feee55-7a2a-41ae-a578-5c272aad5a6f service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Received event network-vif-plugged-62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1379.957148] env[63371]: DEBUG oslo_concurrency.lockutils [req-0def7ef1-8089-49df-8ffc-3639e1a6e862 req-e8feee55-7a2a-41ae-a578-5c272aad5a6f service nova] Acquiring lock "af1281ba-c3be-43b4-a039-86d94bd9efe4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1379.957351] env[63371]: DEBUG oslo_concurrency.lockutils [req-0def7ef1-8089-49df-8ffc-3639e1a6e862 req-e8feee55-7a2a-41ae-a578-5c272aad5a6f service nova] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1379.957791] env[63371]: DEBUG oslo_concurrency.lockutils [req-0def7ef1-8089-49df-8ffc-3639e1a6e862 req-e8feee55-7a2a-41ae-a578-5c272aad5a6f service nova] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1379.958020] env[63371]: DEBUG nova.compute.manager [req-0def7ef1-8089-49df-8ffc-3639e1a6e862 req-e8feee55-7a2a-41ae-a578-5c272aad5a6f service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] No waiting events found dispatching network-vif-plugged-62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1379.958196] env[63371]: WARNING nova.compute.manager [req-0def7ef1-8089-49df-8ffc-3639e1a6e862 req-e8feee55-7a2a-41ae-a578-5c272aad5a6f service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Received unexpected event network-vif-plugged-62ec6b3f-aa36-49ba-ab5a-ce568c16837a for instance with vm_state building and task_state spawning. 
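The 409 "placement.concurrent_update" response logged for req-160e8745 above is Placement's optimistic-concurrency check: every inventory PUT must carry the resource provider generation the client last saw, and a stale generation is rejected so the caller re-reads and retries, which is exactly what the following "Refreshing inventories" lines show. A minimal sketch of that retry loop, assuming a placement endpoint, auth token and provider UUID supplied by the caller (none of these values come from this log):

import requests

def put_inventories_with_retry(endpoint, token, provider_uuid, inventories, max_attempts=5):
    """PUT inventories to Placement, re-reading the provider generation on 409 conflicts."""
    headers = {"X-Auth-Token": token, "OpenStack-API-Version": "placement 1.26"}
    for _ in range(max_attempts):
        # Read the current generation of the resource provider.
        rp = requests.get("%s/resource_providers/%s" % (endpoint, provider_uuid),
                          headers=headers).json()
        body = {"resource_provider_generation": rp["generation"],
                "inventories": inventories}
        resp = requests.put("%s/resource_providers/%s/inventories" % (endpoint, provider_uuid),
                            headers=headers, json=body)
        if resp.status_code == 200:
            return resp.json()
        # Another writer bumped the generation first; refresh and try again.
        if resp.status_code == 409 and "placement.concurrent_update" in resp.text:
            continue
        resp.raise_for_status()
    raise RuntimeError("inventory update still conflicting after %d attempts" % max_attempts)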
[ 1379.967927] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1379.968283] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Instance network_info: |[{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1379.968701] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fe:18:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b2ede0e6-8d7a-4018-bb37-25bf388e9867', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fbd3a7d0-068b-4df5-be7f-d8bf5fe260de', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1379.976809] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Creating folder: Project (8126cc358323499680ab7423d7b6ce0d). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1379.979959] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b289688a-afc8-4d5e-b2af-5141a91d6025 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.990743] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Created folder: Project (8126cc358323499680ab7423d7b6ce0d) in parent group-v368199. 
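The "Instance VIF info" entry above is derived from the neutron network_info logged just before it: for an NSX-backed OVS port the driver keeps the integration bridge name, the MAC address, and an OpaqueNetwork reference built from the port's nsx-logical-switch-id. A rough sketch of that mapping over the plain JSON shape shown in the log (not Nova's actual vmops code, which works on VIF model objects rather than raw dicts):

def vif_info_from_network_info(network_info, vif_model="vmxnet3"):
    """Approximate the VIF-info dicts logged by build_virtual_machine for NSX/OVS ports."""
    vif_infos = []
    for vif in network_info:
        details = vif.get("details", {})
        vif_infos.append({
            "network_name": vif["network"]["bridge"],          # e.g. "br-int"
            "mac_address": vif["address"],
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": details["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],
            "vif_model": vif_model,
        })
    return vif_infos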
[ 1379.990943] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Creating folder: Instances. Parent ref: group-v368262. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1379.991189] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-397c9165-f4f8-4975-aa05-dc38c22b97a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.002297] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Created folder: Instances in parent group-v368262. [ 1380.002537] env[63371]: DEBUG oslo.service.loopingcall [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1380.002721] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1380.002925] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a134ed5c-a64d-4feb-89e8-e382b937dd44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.031445] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1380.031445] env[63371]: value = "task-1773697" [ 1380.031445] env[63371]: _type = "Task" [ 1380.031445] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1380.042968] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773697, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.043875] env[63371]: DEBUG nova.network.neutron [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Successfully updated port: 62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1380.126125] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1380.126125] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1380.126125] env[63371]: DEBUG nova.compute.manager [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Going to confirm migration 1 {{(pid=63371) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1380.273740] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcc90e5a-6c81-48ce-964d-3a7736702eaa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.281346] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6244ee51-ee36-4c97-ac34-9363dc6cced9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.292700] env[63371]: DEBUG oslo_vmware.api [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773694, 'name': PowerOnVM_Task, 'duration_secs': 0.8812} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.320991] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1380.321621] env[63371]: INFO nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Took 7.32 seconds to spawn the instance on the hypervisor. 
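The PowerOnVM_Task and CreateVM_Task lines above show the oslo.vmware pattern of submitting a vCenter task and polling it until it reports success (progress 0% -> 88% -> completed successfully, with the duration recorded). A minimal illustration of that poll loop; fetch_task_state is a hypothetical callable standing in for the PropertyCollector read that the real library performs, not an oslo.vmware API:

import time

def wait_for_task(fetch_task_state, interval=0.5, timeout=300):
    """Poll a vCenter-style task until it succeeds, fails, or times out.

    fetch_task_state is a hypothetical callable returning a dict like
    {"state": "running"|"success"|"error", "progress": int, "error": str|None}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_state()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise RuntimeError(info.get("error") or "task failed")
        # Mirrors the "progress is N%" debug lines in the log.
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(interval)
    raise TimeoutError("task did not complete within %ss" % timeout)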
[ 1380.321870] env[63371]: DEBUG nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1380.323301] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287fd274-70da-4130-a271-906ac9958af9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.326571] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a7535c-cdfc-40c4-b52d-c1d6e8556bf2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.335567] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de12ba3-18b0-46e1-aaf2-1f0879f9463e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1380.351973] env[63371]: DEBUG nova.compute.provider_tree [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1380.541666] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773697, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.551490] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "refresh_cache-af1281ba-c3be-43b4-a039-86d94bd9efe4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1380.551676] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquired lock "refresh_cache-af1281ba-c3be-43b4-a039-86d94bd9efe4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.552026] env[63371]: DEBUG nova.network.neutron [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1380.719385] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1380.719651] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1380.719799] env[63371]: DEBUG nova.network.neutron [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1380.719984] env[63371]: DEBUG nova.objects.instance [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lazy-loading 'info_cache' on Instance uuid f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1380.850700] env[63371]: INFO nova.compute.manager [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Took 42.25 seconds to build instance. 
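The Acquiring/Acquired/Releasing triplets around "refresh_cache-<instance uuid>" above come from oslo.concurrency's named locks, which serialise rebuilds of the per-instance network info cache within a worker process. A small sketch of the same pattern using lockutils directly; the refresh body is a placeholder callable, not Nova's method:

from oslo_concurrency import lockutils

def refresh_instance_network_cache(instance_uuid, refresh_fn):
    """Serialise cache refreshes per instance, as the refresh_cache-<uuid> locks do."""
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        # Only one thread/greenthread in this process rebuilds the cache at a time.
        return refresh_fn(instance_uuid)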
[ 1380.884857] env[63371]: DEBUG nova.scheduler.client.report [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 57 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1380.885137] env[63371]: DEBUG nova.compute.provider_tree [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 57 to 58 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1380.885561] env[63371]: DEBUG nova.compute.provider_tree [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1381.044051] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773697, 'name': CreateVM_Task, 'duration_secs': 0.563332} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.044235] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1381.044919] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.045091] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.045415] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1381.045668] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6393ae14-a05f-46bc-80ff-b1917e9ddc94 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.050328] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1381.050328] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52df2149-d719-bb17-8979-fba2c286b67c" [ 1381.050328] env[63371]: _type = "Task" [ 1381.050328] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.059021] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df2149-d719-bb17-8979-fba2c286b67c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.087106] env[63371]: DEBUG nova.network.neutron [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1381.244182] env[63371]: DEBUG nova.network.neutron [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Updating instance_info_cache with network_info: [{"id": "62ec6b3f-aa36-49ba-ab5a-ce568c16837a", "address": "fa:16:3e:ca:51:d0", "network": {"id": "2a5744d0-ee42-4b97-bf04-f0f41e3e2cdb", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-555494198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f4bffbe32a94e19a1dc4562f925ca9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ec6b3f-aa", "ovs_interfaceid": "62ec6b3f-aa36-49ba-ab5a-ce568c16837a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1381.352092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77636ff8-183a-402d-a310-7e6129e3ea98 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e912c210-3ae1-47ce-b9cd-afebf6195606" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.135s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.390032] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.927s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1381.390449] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1381.393518] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.769s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1381.394981] env[63371]: INFO nova.compute.claims [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1381.562928] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df2149-d719-bb17-8979-fba2c286b67c, 'name': SearchDatastore_Task, 'duration_secs': 0.024273} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1381.567017] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.567017] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1381.567017] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1381.567017] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1381.567347] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1381.567347] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-e1425632-9d78-4910-bb02-ee6ee5b0329c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.574453] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1381.574632] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1381.575372] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40a03bbb-935d-4f1a-b320-bbf37a2937cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.582654] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1381.582654] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528598e2-7fc4-0f56-ddf2-8993e0962d7d" [ 1381.582654] env[63371]: _type = "Task" [ 1381.582654] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.590423] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528598e2-7fc4-0f56-ddf2-8993e0962d7d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.750668] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Releasing lock "refresh_cache-af1281ba-c3be-43b4-a039-86d94bd9efe4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1381.750931] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Instance network_info: |[{"id": "62ec6b3f-aa36-49ba-ab5a-ce568c16837a", "address": "fa:16:3e:ca:51:d0", "network": {"id": "2a5744d0-ee42-4b97-bf04-f0f41e3e2cdb", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-555494198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f4bffbe32a94e19a1dc4562f925ca9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ec6b3f-aa", "ovs_interfaceid": "62ec6b3f-aa36-49ba-ab5a-ce568c16837a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1381.751356] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:51:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '88651df2-0506-4f6c-b868-dd30a81f2b1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '62ec6b3f-aa36-49ba-ab5a-ce568c16837a', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1381.759782] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Creating folder: Project (6f4bffbe32a94e19a1dc4562f925ca9b). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1381.760067] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6bc4661b-a48f-4223-943d-9bdf6403e55c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.771594] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Created folder: Project (6f4bffbe32a94e19a1dc4562f925ca9b) in parent group-v368199. [ 1381.771822] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Creating folder: Instances. Parent ref: group-v368265. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1381.774254] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c1caa3b-c259-47bd-9f0d-06333e5ac019 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.783629] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Created folder: Instances in parent group-v368265. [ 1381.783809] env[63371]: DEBUG oslo.service.loopingcall [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1381.784031] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1381.784635] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30fbd51c-c5c3-4520-9001-367d4ab0bbb4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1381.803047] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1381.803047] env[63371]: value = "task-1773700" [ 1381.803047] env[63371]: _type = "Task" [ 1381.803047] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1381.811020] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773700, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1381.857021] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1381.899506] env[63371]: DEBUG nova.compute.utils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1381.903831] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1381.903831] env[63371]: DEBUG nova.network.neutron [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1382.003268] env[63371]: DEBUG nova.policy [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7cb0ef460c0e47ff89cf2b16c6e61933', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dea4a97b99c84f03a6098b321932dc8f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1382.025789] env[63371]: DEBUG nova.compute.manager [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Received event network-changed-62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1382.026461] env[63371]: DEBUG nova.compute.manager [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Refreshing instance network info cache due to event network-changed-62ec6b3f-aa36-49ba-ab5a-ce568c16837a. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1382.026723] env[63371]: DEBUG oslo_concurrency.lockutils [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] Acquiring lock "refresh_cache-af1281ba-c3be-43b4-a039-86d94bd9efe4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.026873] env[63371]: DEBUG oslo_concurrency.lockutils [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] Acquired lock "refresh_cache-af1281ba-c3be-43b4-a039-86d94bd9efe4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.027049] env[63371]: DEBUG nova.network.neutron [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Refreshing network info cache for port 62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1382.044951] env[63371]: DEBUG nova.network.neutron [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance_info_cache with network_info: [{"id": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "address": "fa:16:3e:39:29:ca", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.42", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3eac2e62-a1", "ovs_interfaceid": "3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.095564] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528598e2-7fc4-0f56-ddf2-8993e0962d7d, 'name': SearchDatastore_Task, 'duration_secs': 0.009351} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.101470] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.101704] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1382.101887] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9741dea6-9022-45ce-8731-8098b1949d4e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.107769] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1382.107769] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ffb153-0b46-e681-ae23-84b76036ddad" [ 1382.107769] env[63371]: _type = "Task" [ 1382.107769] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.116320] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ffb153-0b46-e681-ae23-84b76036ddad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.312896] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773700, 'name': CreateVM_Task, 'duration_secs': 0.392761} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.313085] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1382.313760] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.313921] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.314267] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1382.315052] env[63371]: DEBUG nova.network.neutron [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Successfully created port: 00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1382.316733] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-534a5b3e-5786-46f3-8f2f-905e6266bc00 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.321930] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1382.321930] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52515e7f-2b9e-4408-f22f-bb17c80ec269" [ 1382.321930] env[63371]: _type = "Task" [ 1382.321930] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.329783] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52515e7f-2b9e-4408-f22f-bb17c80ec269, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.378991] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1382.403797] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1382.547289] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.547646] env[63371]: DEBUG nova.objects.instance [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lazy-loading 'migration_context' on Instance uuid f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1382.625920] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ffb153-0b46-e681-ae23-84b76036ddad, 'name': SearchDatastore_Task, 'duration_secs': 0.009505} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.626229] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.626486] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/713dfaf5-d11f-4af2-af92-66a596b0ed4a.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1382.626742] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-379853ac-ad00-4ea9-8b42-9a998e632deb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.636783] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1382.636783] env[63371]: value = "task-1773701" [ 1382.636783] env[63371]: _type = "Task" [ 1382.636783] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.649799] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773701, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1382.832792] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52515e7f-2b9e-4408-f22f-bb17c80ec269, 'name': SearchDatastore_Task, 'duration_secs': 0.008608} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1382.837830] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1382.838120] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1382.838358] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1382.838504] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1382.838678] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1382.839218] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-049e94b9-6164-4ec3-a856-859d1ceeab28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.855185] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1382.855387] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1382.856188] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-227c0cfc-3e82-487b-8d12-cbf6b9f6109b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1382.860970] env[63371]: DEBUG nova.network.neutron [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Updated VIF entry in instance network info cache for port 62ec6b3f-aa36-49ba-ab5a-ce568c16837a. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1382.861341] env[63371]: DEBUG nova.network.neutron [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Updating instance_info_cache with network_info: [{"id": "62ec6b3f-aa36-49ba-ab5a-ce568c16837a", "address": "fa:16:3e:ca:51:d0", "network": {"id": "2a5744d0-ee42-4b97-bf04-f0f41e3e2cdb", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-555494198-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f4bffbe32a94e19a1dc4562f925ca9b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "88651df2-0506-4f6c-b868-dd30a81f2b1c", "external-id": "nsx-vlan-transportzone-366", "segmentation_id": 366, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap62ec6b3f-aa", "ovs_interfaceid": "62ec6b3f-aa36-49ba-ab5a-ce568c16837a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1382.867480] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1382.867480] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b6c86b-8188-3a74-e8d3-213980c8c571" [ 1382.867480] env[63371]: _type = "Task" [ 1382.867480] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1382.877974] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b6c86b-8188-3a74-e8d3-213980c8c571, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.050267] env[63371]: DEBUG nova.objects.base [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1383.051225] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92959437-188b-444c-99e6-55c89137780b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.054920] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7716f675-da37-4f55-b19e-ae975ade4322 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.079780] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da375159-aa48-4a6c-b632-31c269875a5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.082592] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f493ed0b-9eb3-4eda-8c99-4bb684d565b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.088958] env[63371]: DEBUG oslo_vmware.api [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1383.088958] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cfbd82-e746-7d20-5ad2-709af40da65e" [ 1383.088958] env[63371]: _type = "Task" [ 1383.088958] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.118315] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b19d7611-3478-4b36-aa84-a98b41638d83 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.128851] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ad61c9-9861-4923-82dc-2ccb3d7ba51a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.135445] env[63371]: DEBUG oslo_vmware.api [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cfbd82-e746-7d20-5ad2-709af40da65e, 'name': SearchDatastore_Task, 'duration_secs': 0.030788} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.135445] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1383.145008] env[63371]: DEBUG nova.compute.provider_tree [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1383.154516] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773701, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501935} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.154829] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/713dfaf5-d11f-4af2-af92-66a596b0ed4a.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1383.154985] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1383.155263] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8fdc0aa-609c-4dfd-9a61-7d60721890af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.161702] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1383.161702] env[63371]: value = "task-1773702" [ 1383.161702] env[63371]: _type = "Task" [ 1383.161702] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.172056] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773702, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.369056] env[63371]: DEBUG oslo_concurrency.lockutils [req-12382289-f84b-4e65-9c0e-914a1eac4bd5 req-9c123cc5-de72-4780-ad15-2101d645c051 service nova] Releasing lock "refresh_cache-af1281ba-c3be-43b4-a039-86d94bd9efe4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.379720] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b6c86b-8188-3a74-e8d3-213980c8c571, 'name': SearchDatastore_Task, 'duration_secs': 0.056461} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.380609] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17a9d415-5dba-480a-a598-f6017539cfcb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.386337] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1383.386337] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52454de6-f0c8-1310-9fc2-6129d145d232" [ 1383.386337] env[63371]: _type = "Task" [ 1383.386337] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.396671] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52454de6-f0c8-1310-9fc2-6129d145d232, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.418428] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1383.446172] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1383.446422] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1383.446575] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1383.446756] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1383.446903] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1383.447070] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1383.447253] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1383.447405] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1383.447645] env[63371]: DEBUG nova.virt.hardware [None 
req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1383.447809] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1383.447973] env[63371]: DEBUG nova.virt.hardware [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1383.448836] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353c4a01-6bf2-44be-b297-c0bc6043d1d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.456742] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335324ef-0771-4f67-9910-21f69cf8edab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.650939] env[63371]: DEBUG nova.scheduler.client.report [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1383.671895] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773702, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074267} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.672127] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1383.672916] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35de7df-6c44-4376-82ac-4448e4237c1f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.696987] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/713dfaf5-d11f-4af2-af92-66a596b0ed4a.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1383.697577] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a5ec5d3-b89d-4f71-92ae-3b1cd90ebcc1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.718456] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1383.718456] env[63371]: value = "task-1773703" [ 1383.718456] env[63371]: _type = "Task" [ 1383.718456] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.727893] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773703, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1383.896655] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52454de6-f0c8-1310-9fc2-6129d145d232, 'name': SearchDatastore_Task, 'duration_secs': 0.017155} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1383.896923] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1383.897192] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] af1281ba-c3be-43b4-a039-86d94bd9efe4/af1281ba-c3be-43b4-a039-86d94bd9efe4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1383.897949] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-83b9189a-270b-4a4a-a9a0-2d2588abaea8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.904026] env[63371]: DEBUG nova.network.neutron [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Successfully updated port: 00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1383.906793] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1383.906793] env[63371]: value = "task-1773704" [ 1383.906793] env[63371]: _type = "Task" [ 1383.906793] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.914097] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773704, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.118234] env[63371]: DEBUG nova.compute.manager [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Received event network-vif-plugged-00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1384.118469] env[63371]: DEBUG oslo_concurrency.lockutils [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] Acquiring lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1384.118697] env[63371]: DEBUG oslo_concurrency.lockutils [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.118899] env[63371]: DEBUG oslo_concurrency.lockutils [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.119075] env[63371]: DEBUG nova.compute.manager [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] No waiting events found dispatching network-vif-plugged-00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1384.119261] env[63371]: WARNING nova.compute.manager [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Received unexpected event network-vif-plugged-00f821d3-2f0a-46f0-9551-f7eefb581c66 for instance with vm_state building and task_state spawning. [ 1384.119382] env[63371]: DEBUG nova.compute.manager [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Received event network-changed-00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1384.119528] env[63371]: DEBUG nova.compute.manager [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Refreshing instance network info cache due to event network-changed-00f821d3-2f0a-46f0-9551-f7eefb581c66. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1384.119747] env[63371]: DEBUG oslo_concurrency.lockutils [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] Acquiring lock "refresh_cache-3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.119880] env[63371]: DEBUG oslo_concurrency.lockutils [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] Acquired lock "refresh_cache-3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1384.120062] env[63371]: DEBUG nova.network.neutron [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Refreshing network info cache for port 00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1384.155890] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.762s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1384.156403] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1384.160450] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.891s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1384.160648] env[63371]: DEBUG nova.objects.instance [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lazy-loading 'resources' on Instance uuid a43fed87-5205-4148-834e-66778a90b7bc {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1384.228662] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773703, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.406936] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "refresh_cache-3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1384.419176] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773704, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.652843] env[63371]: DEBUG nova.network.neutron [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1384.664424] env[63371]: DEBUG nova.compute.utils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1384.665808] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1384.665908] env[63371]: DEBUG nova.network.neutron [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1384.731305] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773703, 'name': ReconfigVM_Task, 'duration_secs': 0.905326} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.731305] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/713dfaf5-d11f-4af2-af92-66a596b0ed4a.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1384.734030] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0cf6b472-9cf3-4a6b-81c1-dd343456589d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.739365] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1384.739365] env[63371]: value = "task-1773705" [ 1384.739365] env[63371]: _type = "Task" [ 1384.739365] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.748402] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773705, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.787326] env[63371]: DEBUG nova.policy [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a258c38635014fdf9c6e3907bda2fd03', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a884a9d1a3ae410b858851431c166183', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1384.827587] env[63371]: DEBUG nova.network.neutron [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1384.920595] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773704, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.706564} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.920900] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] af1281ba-c3be-43b4-a039-86d94bd9efe4/af1281ba-c3be-43b4-a039-86d94bd9efe4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1384.921344] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1384.921642] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e674894c-792c-4a38-b50c-0cd5cdbfe1e7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.930455] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1384.930455] env[63371]: value = "task-1773706" [ 1384.930455] env[63371]: _type = "Task" [ 1384.930455] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.942125] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773706, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.172578] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1385.252771] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773705, 'name': Rename_Task, 'duration_secs': 0.221526} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.252771] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1385.252771] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7e51c8e-a3c6-4ba9-9438-81c347b168f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.256874] env[63371]: DEBUG nova.network.neutron [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Successfully created port: 3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1385.265185] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1385.265185] env[63371]: value = "task-1773707" [ 1385.265185] env[63371]: _type = "Task" [ 1385.265185] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.277197] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773707, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.281037] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f61e4b-76cb-42d8-9504-c3f9837916bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.291021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ee3973-e752-490e-bbc0-c7022c58596b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.321954] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748d7296-ef5f-4d6f-a188-00414fbe3144 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.328900] env[63371]: DEBUG oslo_concurrency.lockutils [req-0eaa2978-26ec-4847-9a38-4f5d095eab9a req-4a1cf5d5-56b7-4f94-aa76-d6adfa910dc1 service nova] Releasing lock "refresh_cache-3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1385.330276] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b853753-dcac-4b20-8ab9-529b7167d978 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.334164] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquired lock "refresh_cache-3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1385.334327] env[63371]: DEBUG nova.network.neutron [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1385.345397] env[63371]: DEBUG nova.compute.provider_tree [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1385.440861] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773706, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069056} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.441285] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1385.442124] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239baa92-9a9a-44b0-a050-5b506666b132 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.464531] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] af1281ba-c3be-43b4-a039-86d94bd9efe4/af1281ba-c3be-43b4-a039-86d94bd9efe4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1385.464892] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55b398fa-252e-40ea-85bc-e7fadce0c885 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.484214] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1385.484214] env[63371]: value = "task-1773708" [ 1385.484214] env[63371]: _type = "Task" [ 1385.484214] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1385.493780] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773708, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.775300] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773707, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.850251] env[63371]: DEBUG nova.scheduler.client.report [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1385.876066] env[63371]: DEBUG nova.network.neutron [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1385.994794] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773708, 'name': ReconfigVM_Task, 'duration_secs': 0.454287} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.995079] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Reconfigured VM instance instance-00000017 to attach disk [datastore1] af1281ba-c3be-43b4-a039-86d94bd9efe4/af1281ba-c3be-43b4-a039-86d94bd9efe4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1385.995759] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39c3d55f-99e1-47f7-8ff1-828e5ada22de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.002799] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1386.002799] env[63371]: value = "task-1773709" [ 1386.002799] env[63371]: _type = "Task" [ 1386.002799] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.014597] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773709, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.071382] env[63371]: DEBUG nova.network.neutron [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Updating instance_info_cache with network_info: [{"id": "00f821d3-2f0a-46f0-9551-f7eefb581c66", "address": "fa:16:3e:f0:75:64", "network": {"id": "19de600e-a1b8-4d10-9f47-0d72c8817654", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-764261102-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dea4a97b99c84f03a6098b321932dc8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00f821d3-2f", "ovs_interfaceid": "00f821d3-2f0a-46f0-9551-f7eefb581c66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.185951] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1386.219965] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1386.220133] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1386.220244] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1386.220563] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1386.220563] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1386.220685] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1386.222012] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1386.222012] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1386.222012] 
env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1386.222012] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1386.222012] env[63371]: DEBUG nova.virt.hardware [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1386.222442] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c094e4-58bd-4e29-b71a-ede3a565487b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.230329] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8212f371-1b32-43a3-b523-2c41708700df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.278155] env[63371]: DEBUG oslo_vmware.api [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773707, 'name': PowerOnVM_Task, 'duration_secs': 1.011968} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.278155] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1386.278155] env[63371]: INFO nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Took 9.48 seconds to spawn the instance on the hypervisor. 
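The resource-tracker entries above ('Lock "compute_resources" acquired ... waited 33.891s', '"released" ... held 2.195s') all come from code running under a named oslo.concurrency lock. As a minimal illustration of that pattern only, not the actual Nova resource-tracker code:

    # Sketch of the lockutils pattern behind the "compute_resources" lock
    # messages; the function body is a placeholder, not Nova code.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Runs while holding the in-process 'compute_resources' lock;
        # lockutils logs how long the caller waited and how long the lock
        # was held, producing entries like those in the log above.
        pass

    # The same lock can also be taken explicitly as a context manager:
    with lockutils.lock('compute_resources'):
        pass

Because every claim, usage update and move-claim drop serializes on this one lock, the long "waited" times in the log (tens of seconds here) point to contention on the resource tracker rather than slow vCenter calls.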
[ 1386.278155] env[63371]: DEBUG nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1386.278628] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fafa185-f6f2-4498-bc2a-d5c26750d70e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.355882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.195s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.358834] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.358834] env[63371]: DEBUG nova.objects.instance [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lazy-loading 'resources' on Instance uuid 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1386.382115] env[63371]: INFO nova.scheduler.client.report [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Deleted allocations for instance a43fed87-5205-4148-834e-66778a90b7bc [ 1386.513227] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773709, 'name': Rename_Task, 'duration_secs': 0.136223} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1386.513563] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1386.513846] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-63cb267f-0aad-4105-90fc-4830d5c771d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.520108] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1386.520108] env[63371]: value = "task-1773710" [ 1386.520108] env[63371]: _type = "Task" [ 1386.520108] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.528339] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773710, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.571211] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Releasing lock "refresh_cache-3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1386.571577] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Instance network_info: |[{"id": "00f821d3-2f0a-46f0-9551-f7eefb581c66", "address": "fa:16:3e:f0:75:64", "network": {"id": "19de600e-a1b8-4d10-9f47-0d72c8817654", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-764261102-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dea4a97b99c84f03a6098b321932dc8f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6edb8eae-1113-49d0-84f7-9fd9f82b26fb", "external-id": "nsx-vlan-transportzone-493", "segmentation_id": 493, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00f821d3-2f", "ovs_interfaceid": "00f821d3-2f0a-46f0-9551-f7eefb581c66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1386.572031] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None 
req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:75:64', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6edb8eae-1113-49d0-84f7-9fd9f82b26fb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00f821d3-2f0a-46f0-9551-f7eefb581c66', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1386.581170] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Creating folder: Project (dea4a97b99c84f03a6098b321932dc8f). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1386.581864] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a12f2c70-51ae-44ba-ad46-7b6295d44637 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.592487] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Created folder: Project (dea4a97b99c84f03a6098b321932dc8f) in parent group-v368199. [ 1386.593017] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Creating folder: Instances. Parent ref: group-v368268. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1386.593017] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-392fc642-e09f-478e-b828-8fc0f826a1a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.602563] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Created folder: Instances in parent group-v368268. [ 1386.602822] env[63371]: DEBUG oslo.service.loopingcall [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1386.603237] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1386.603471] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ec37b25-a333-48c6-868e-9f5398a636b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.625352] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1386.625352] env[63371]: value = "task-1773713" [ 1386.625352] env[63371]: _type = "Task" [ 1386.625352] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1386.633465] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773713, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1386.795549] env[63371]: INFO nova.compute.manager [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Took 47.12 seconds to build instance. [ 1386.867873] env[63371]: DEBUG nova.compute.manager [req-0ede0b9b-5e08-4014-b6d6-821d268b6f2d req-7dcfcc4d-d548-4ff4-b941-d2939ad622a9 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Received event network-vif-plugged-3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1386.868104] env[63371]: DEBUG oslo_concurrency.lockutils [req-0ede0b9b-5e08-4014-b6d6-821d268b6f2d req-7dcfcc4d-d548-4ff4-b941-d2939ad622a9 service nova] Acquiring lock "76c861a7-30f2-40f4-b723-7912975f36f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1386.868309] env[63371]: DEBUG oslo_concurrency.lockutils [req-0ede0b9b-5e08-4014-b6d6-821d268b6f2d req-7dcfcc4d-d548-4ff4-b941-d2939ad622a9 service nova] Lock "76c861a7-30f2-40f4-b723-7912975f36f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1386.868472] env[63371]: DEBUG oslo_concurrency.lockutils [req-0ede0b9b-5e08-4014-b6d6-821d268b6f2d req-7dcfcc4d-d548-4ff4-b941-d2939ad622a9 service nova] Lock "76c861a7-30f2-40f4-b723-7912975f36f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1386.868643] env[63371]: DEBUG nova.compute.manager [req-0ede0b9b-5e08-4014-b6d6-821d268b6f2d req-7dcfcc4d-d548-4ff4-b941-d2939ad622a9 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] No waiting events found dispatching network-vif-plugged-3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1386.868793] env[63371]: WARNING nova.compute.manager [req-0ede0b9b-5e08-4014-b6d6-821d268b6f2d req-7dcfcc4d-d548-4ff4-b941-d2939ad622a9 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Received unexpected event network-vif-plugged-3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 for instance with vm_state building and task_state spawning. 
[ 1386.891249] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9c4eb1d2-fa2b-49ab-a3f7-4524b61b585c tempest-FloatingIPsAssociationNegativeTestJSON-2341806 tempest-FloatingIPsAssociationNegativeTestJSON-2341806-project-member] Lock "a43fed87-5205-4148-834e-66778a90b7bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.707s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.034760] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773710, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.082188] env[63371]: DEBUG nova.network.neutron [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Successfully updated port: 3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1387.136117] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773713, 'name': CreateVM_Task, 'duration_secs': 0.395743} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.138719] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1387.139696] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.140015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.141377] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1387.141377] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50bf8dce-7d97-4c62-b65f-6659209fc7f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.146027] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1387.146027] env[63371]: value = 
"session[52854284-8312-6a88-0b15-8c5a2a120aab]5289981c-efb9-7280-a166-9a5676f68a75" [ 1387.146027] env[63371]: _type = "Task" [ 1387.146027] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.155291] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5289981c-efb9-7280-a166-9a5676f68a75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.298292] env[63371]: DEBUG oslo_concurrency.lockutils [None req-104f4570-31fd-4c92-8d81-6f9627459bdc tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.349s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1387.481181] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08755a23-bc05-4d17-86c7-8da3b5696d2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.488576] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c311c8-a211-4af6-aa01-6864a9c239cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.520562] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6225d00-157e-423f-acac-d60f3dfb4a01 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.534310] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513dea5a-385a-43c2-9894-75ac40eaccbb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.537548] env[63371]: DEBUG oslo_vmware.api [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773710, 'name': PowerOnVM_Task, 'duration_secs': 0.618872} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.539019] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1387.539019] env[63371]: INFO nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Took 8.05 seconds to spawn the instance on the hypervisor. 
[ 1387.539019] env[63371]: DEBUG nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1387.539341] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519e27c1-50a3-49f5-8ed1-ba998095e05c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.551814] env[63371]: DEBUG nova.compute.provider_tree [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1387.586873] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "refresh_cache-76c861a7-30f2-40f4-b723-7912975f36f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.586873] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired lock "refresh_cache-76c861a7-30f2-40f4-b723-7912975f36f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.586873] env[63371]: DEBUG nova.network.neutron [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1387.667164] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5289981c-efb9-7280-a166-9a5676f68a75, 'name': SearchDatastore_Task, 'duration_secs': 0.013742} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1387.667164] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1387.667164] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1387.667164] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.667451] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.667451] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1387.667632] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5f53aed-7da8-434a-9afa-46b4a5666652 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.682337] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1387.682623] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1387.683357] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92f912b7-8802-4089-97ce-bdcf5c050caf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.688799] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1387.688799] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526234a1-49f4-57c2-075c-8b190191cb64" [ 1387.688799] env[63371]: _type = "Task" [ 1387.688799] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1387.697166] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526234a1-49f4-57c2-075c-8b190191cb64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1387.800221] env[63371]: INFO nova.compute.manager [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Rescuing [ 1387.800788] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1387.800959] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1387.801177] env[63371]: DEBUG nova.network.neutron [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1387.802488] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1388.057515] env[63371]: DEBUG nova.scheduler.client.report [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1388.069392] env[63371]: INFO nova.compute.manager [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Took 45.65 seconds to build instance. [ 1388.127386] env[63371]: DEBUG nova.network.neutron [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1388.212043] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526234a1-49f4-57c2-075c-8b190191cb64, 'name': SearchDatastore_Task, 'duration_secs': 0.026288} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.213392] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc575a20-c1c5-4337-8761-35a99cc243da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.224256] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1388.224256] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521bd231-63ff-4520-1421-540075498cbe" [ 1388.224256] env[63371]: _type = "Task" [ 1388.224256] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.236222] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521bd231-63ff-4520-1421-540075498cbe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.329091] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1388.351435] env[63371]: DEBUG nova.network.neutron [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Updating instance_info_cache with network_info: [{"id": "3c5c963f-1c9c-4d03-bb01-5670b9fe06b4", "address": "fa:16:3e:24:0c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5c963f-1c", "ovs_interfaceid": "3c5c963f-1c9c-4d03-bb01-5670b9fe06b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.567754] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.209s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.572596] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.549s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1388.575109] env[63371]: INFO nova.compute.claims [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1388.579116] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e00b11e-eb7f-4d62-a548-c1a429f4ce71 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.706s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.597392] env[63371]: INFO nova.scheduler.client.report [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Deleted allocations for instance 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d [ 1388.700257] env[63371]: DEBUG nova.network.neutron [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1388.738268] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521bd231-63ff-4520-1421-540075498cbe, 'name': SearchDatastore_Task, 'duration_secs': 0.015793} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1388.738268] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.738268] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406/3f79bc3e-4dd4-4b5f-a5ba-a17124e70406.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1388.738268] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58f0be02-5761-4174-b054-aac7d406d5f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.746633] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1388.746633] env[63371]: value = "task-1773714" [ 1388.746633] env[63371]: _type = "Task" [ 1388.746633] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.758434] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773714, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1388.853591] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Releasing lock "refresh_cache-76c861a7-30f2-40f4-b723-7912975f36f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1388.854020] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Instance network_info: |[{"id": "3c5c963f-1c9c-4d03-bb01-5670b9fe06b4", "address": "fa:16:3e:24:0c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5c963f-1c", "ovs_interfaceid": "3c5c963f-1c9c-4d03-bb01-5670b9fe06b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1388.854495] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:24:0c:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c5c963f-1c9c-4d03-bb01-5670b9fe06b4', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1388.862753] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating folder: Project (a884a9d1a3ae410b858851431c166183). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1388.863460] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8e5c94c4-fcde-411e-9ed5-b0e214e4fdc9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.874404] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Created folder: Project (a884a9d1a3ae410b858851431c166183) in parent group-v368199. [ 1388.875812] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating folder: Instances. Parent ref: group-v368271. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1388.876164] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfad2b05-2aa4-448b-8d6d-442045f9507c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.886621] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Created folder: Instances in parent group-v368271. [ 1388.886621] env[63371]: DEBUG oslo.service.loopingcall [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1388.886807] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1388.887021] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b1cc79c8-482a-4264-a99f-fd99725b5beb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1388.906463] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1388.906463] env[63371]: value = "task-1773717" [ 1388.906463] env[63371]: _type = "Task" [ 1388.906463] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1388.916118] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773717, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.083615] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1389.114026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86d50bee-50d6-4076-b7ab-27c12451f14c tempest-ServersNegativeTestMultiTenantJSON-1053627231 tempest-ServersNegativeTestMultiTenantJSON-1053627231-project-member] Lock "4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.347s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1389.203241] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1389.258760] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773714, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.280295] env[63371]: DEBUG nova.compute.manager [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Received event network-changed-3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1389.280295] env[63371]: DEBUG nova.compute.manager [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Refreshing instance network info cache due to event network-changed-3c5c963f-1c9c-4d03-bb01-5670b9fe06b4. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1389.280295] env[63371]: DEBUG oslo_concurrency.lockutils [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] Acquiring lock "refresh_cache-76c861a7-30f2-40f4-b723-7912975f36f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.280852] env[63371]: DEBUG oslo_concurrency.lockutils [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] Acquired lock "refresh_cache-76c861a7-30f2-40f4-b723-7912975f36f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.280852] env[63371]: DEBUG nova.network.neutron [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Refreshing network info cache for port 3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1389.282436] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "interface-af1281ba-c3be-43b4-a039-86d94bd9efe4-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.282643] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "interface-af1281ba-c3be-43b4-a039-86d94bd9efe4-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1389.282932] env[63371]: DEBUG nova.objects.instance [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lazy-loading 'flavor' on Instance uuid af1281ba-c3be-43b4-a039-86d94bd9efe4 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1389.416860] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773717, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.616884] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1389.743116] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1389.743413] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a401458-14eb-403a-9427-b2603e0194bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.758853] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1389.758853] env[63371]: value = "task-1773718" [ 1389.758853] env[63371]: _type = "Task" [ 1389.758853] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.767258] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773714, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.534661} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.768048] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406/3f79bc3e-4dd4-4b5f-a5ba-a17124e70406.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1389.768279] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1389.770887] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1b1cadb7-017c-41bf-a6a5-7fc100773278 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.776627] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773718, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.781930] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1389.781930] env[63371]: value = "task-1773719" [ 1389.781930] env[63371]: _type = "Task" [ 1389.781930] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.793634] env[63371]: DEBUG nova.objects.instance [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lazy-loading 'pci_requests' on Instance uuid af1281ba-c3be-43b4-a039-86d94bd9efe4 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1389.800339] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773719, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1389.918771] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773717, 'name': CreateVM_Task, 'duration_secs': 0.797774} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1389.919146] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1389.919960] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1389.920372] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1389.920709] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1389.921097] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-719d3ee0-f1bb-419d-b1c0-c4e84d9a7046 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1389.930112] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1389.930112] env[63371]: value = 
"session[52854284-8312-6a88-0b15-8c5a2a120aab]5257b475-dcb9-d7ee-63c1-32587bc56ede" [ 1389.930112] env[63371]: _type = "Task" [ 1389.930112] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1389.936749] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5257b475-dcb9-d7ee-63c1-32587bc56ede, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.015188] env[63371]: DEBUG nova.network.neutron [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Updated VIF entry in instance network info cache for port 3c5c963f-1c9c-4d03-bb01-5670b9fe06b4. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1390.015539] env[63371]: DEBUG nova.network.neutron [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Updating instance_info_cache with network_info: [{"id": "3c5c963f-1c9c-4d03-bb01-5670b9fe06b4", "address": "fa:16:3e:24:0c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c5c963f-1c", "ovs_interfaceid": "3c5c963f-1c9c-4d03-bb01-5670b9fe06b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1390.183135] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a86db3-efdb-4349-8383-8bbfa90cd712 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.191854] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d6a272-2d1f-439b-9d2e-1b4f1b1864ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.229171] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9eb569f-359e-4931-9afd-d984a79aba5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.238056] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4f6b6ebb-524b-497a-97c6-42e29c005c66 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.254437] env[63371]: DEBUG nova.compute.provider_tree [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1390.268949] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773718, 'name': PowerOffVM_Task, 'duration_secs': 0.245299} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.269212] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1390.270018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159754af-0c86-4fd4-a857-3912d02c7d1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.293496] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aceb2038-cfb4-493b-add3-e83e7d4e86c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.298069] env[63371]: DEBUG nova.objects.base [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1390.298069] env[63371]: DEBUG nova.network.neutron [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1390.302998] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773719, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071083} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.305309] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1390.307798] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64cb22d-dfbe-4792-b457-8e575356e120 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.330210] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406/3f79bc3e-4dd4-4b5f-a5ba-a17124e70406.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1390.330505] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dc18440-b284-4931-9097-c64e692e062e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.352511] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1390.352511] env[63371]: value = "task-1773720" [ 1390.352511] env[63371]: _type = "Task" [ 1390.352511] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.354371] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1390.354693] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d4e865e-9e89-4bdc-8c6f-689caf5a6aaa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.365896] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773720, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.367157] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1390.367157] env[63371]: value = "task-1773721" [ 1390.367157] env[63371]: _type = "Task" [ 1390.367157] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.376237] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1390.376552] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1390.376844] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.377039] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1390.377255] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1390.377532] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e34530bb-c18a-412a-a86f-bf8ce2b9c66c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.384996] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1390.385439] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1390.385928] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee1f1c36-05f6-44b4-962f-38dbaee14679 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.391921] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1390.391921] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52558f94-3df3-51c2-d4b4-22573e48660c" [ 1390.391921] env[63371]: _type = "Task" [ 1390.391921] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.398781] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52558f94-3df3-51c2-d4b4-22573e48660c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.427811] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f951745e-b335-4f4d-927e-2e29f57293f4 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "interface-af1281ba-c3be-43b4-a039-86d94bd9efe4-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.145s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1390.439498] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5257b475-dcb9-d7ee-63c1-32587bc56ede, 'name': SearchDatastore_Task, 'duration_secs': 0.018911} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.440478] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.440759] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1390.441085] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1390.518061] env[63371]: DEBUG oslo_concurrency.lockutils [req-63779800-68bd-4ad8-bf2b-6cdacbf12669 req-24240d9c-4ebb-4a83-93ce-284556c8fbb1 service nova] Releasing lock "refresh_cache-76c861a7-30f2-40f4-b723-7912975f36f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1390.758914] env[63371]: DEBUG nova.scheduler.client.report [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1390.864394] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773720, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1390.903055] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52558f94-3df3-51c2-d4b4-22573e48660c, 'name': SearchDatastore_Task, 'duration_secs': 0.041584} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1390.903780] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4a5eed4-4915-40ba-a7ef-9a84dac974be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.911129] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1390.911129] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52344555-299a-ff83-35c8-98e556a26929" [ 1390.911129] env[63371]: _type = "Task" [ 1390.911129] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1390.924042] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52344555-299a-ff83-35c8-98e556a26929, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.267992] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.696s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1391.268421] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1391.276438] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.667s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1391.276438] env[63371]: INFO nova.compute.claims [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1391.364241] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773720, 'name': ReconfigVM_Task, 'duration_secs': 0.791229} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.364401] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406/3f79bc3e-4dd4-4b5f-a5ba-a17124e70406.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1391.365417] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac784563-a876-4237-a37e-199a6ea68ab2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.373871] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1391.373871] env[63371]: value = "task-1773722" [ 1391.373871] env[63371]: _type = "Task" [ 1391.373871] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.382683] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773722, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.423034] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52344555-299a-ff83-35c8-98e556a26929, 'name': SearchDatastore_Task, 'duration_secs': 0.034389} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.423437] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.423720] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. 
{{(pid=63371) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1391.424064] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1391.424308] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1391.424592] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a172851f-04a2-417c-a097-0ecf3e2d7091 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.427408] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22829eb9-2e65-4cb9-92c5-1aa4bead7f11 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.435533] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1391.435533] env[63371]: value = "task-1773723" [ 1391.435533] env[63371]: _type = "Task" [ 1391.435533] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.436914] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1391.437643] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1391.445815] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-687950c9-5ce3-412e-a938-64576b8ade64 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.458009] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1391.458009] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d839a5-99e5-d715-a727-d5e91609c733" [ 1391.458009] env[63371]: _type = "Task" [ 1391.458009] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.458343] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773723, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.467482] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d839a5-99e5-d715-a727-d5e91609c733, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.783279] env[63371]: DEBUG nova.compute.utils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1391.788146] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1391.788146] env[63371]: DEBUG nova.network.neutron [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1391.888957] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773722, 'name': Rename_Task, 'duration_secs': 0.145421} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.889342] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1391.890916] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29bef8a8-342d-4aa6-8964-f441e8fbad28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.899869] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1391.899869] env[63371]: value = "task-1773724" [ 1391.899869] env[63371]: _type = "Task" [ 1391.899869] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.909690] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773724, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.922809] env[63371]: DEBUG nova.policy [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '15eb038ed0dc4c9d9f948d154c244a32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bf723c611d61478cbb81b2bc474a74f4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1391.950539] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773723, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1391.970381] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d839a5-99e5-d715-a727-d5e91609c733, 'name': SearchDatastore_Task, 'duration_secs': 0.019036} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.971383] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-035ac7b3-9b0b-48a8-895f-59c12287b15f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.976557] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1391.976557] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5270d4b2-80d2-d1d6-f862-4fc90e819147" [ 1391.976557] env[63371]: _type = "Task" [ 1391.976557] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1391.995162] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5270d4b2-80d2-d1d6-f862-4fc90e819147, 'name': SearchDatastore_Task, 'duration_secs': 0.010898} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1391.997463] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1391.997780] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 76c861a7-30f2-40f4-b723-7912975f36f8/76c861a7-30f2-40f4-b723-7912975f36f8.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1391.998039] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5aafbd75-a313-4e22-8027-a7ac4dae1c79 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.005874] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1392.005874] env[63371]: value = "task-1773725" [ 1392.005874] env[63371]: _type = "Task" [ 1392.005874] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.014994] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773725, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.288518] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1392.419026] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773724, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.452294] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773723, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531649} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.452579] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. [ 1392.453455] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ad8e3e-deb0-49ab-a81e-87eb9fe43013 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.487029] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1392.489279] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f85f40b-263c-4dc4-b734-8d94cd59a128 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.518916] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773725, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507467} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.518916] env[63371]: DEBUG nova.network.neutron [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Successfully created port: 5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1392.523930] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 76c861a7-30f2-40f4-b723-7912975f36f8/76c861a7-30f2-40f4-b723-7912975f36f8.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1392.524163] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1392.524464] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1392.524464] env[63371]: value = "task-1773726" [ 1392.524464] env[63371]: _type = "Task" [ 1392.524464] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.527250] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1007a94c-a1f8-4cb0-b9a1-e3a82360c2e4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.538929] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773726, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.540379] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1392.540379] env[63371]: value = "task-1773727" [ 1392.540379] env[63371]: _type = "Task" [ 1392.540379] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.558162] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "af1281ba-c3be-43b4-a039-86d94bd9efe4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.558492] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.558758] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "af1281ba-c3be-43b4-a039-86d94bd9efe4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1392.559057] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1392.559296] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1392.562618] env[63371]: INFO nova.compute.manager [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Terminating instance [ 1392.568346] env[63371]: DEBUG nova.compute.manager [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1392.568346] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1392.572299] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2266836-a965-4df7-9bd8-01c26c711fc6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.582164] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1392.582487] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de330ee2-9dcd-4cf1-9303-af758127574b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.588774] env[63371]: DEBUG oslo_vmware.api [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1392.588774] env[63371]: value = "task-1773728" [ 1392.588774] env[63371]: _type = "Task" [ 1392.588774] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1392.600618] env[63371]: DEBUG oslo_vmware.api [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773728, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1392.799708] env[63371]: INFO nova.virt.block_device [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Booting with volume 1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8 at /dev/sda [ 1392.869668] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e162c4be-5497-407b-a2f8-9ba67668ca44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.885209] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5c35d95-0b92-4951-99e7-cd0260d4d1ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.911013] env[63371]: DEBUG oslo_vmware.api [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773724, 'name': PowerOnVM_Task, 'duration_secs': 0.794425} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1392.922906] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1392.923165] env[63371]: INFO nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Took 9.50 seconds to spawn the instance on the hypervisor. [ 1392.923342] env[63371]: DEBUG nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1392.924371] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99baa3c-4dd2-4efd-a8f4-3f71ea81995a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.927061] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a71a912-ad2f-4407-bf08-f186cdfc1ed7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.942783] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5445c40-a30e-4d46-a7b8-7b33e12e5b80 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.989925] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8716f1f3-a6e0-4988-a038-01ea7700acc7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.997626] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-492b180e-b7a8-48b8-8dd0-82a98712add3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.012262] env[63371]: DEBUG nova.virt.block_device [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updating existing volume attachment record: d0b4bf9b-f6ef-410a-a228-58c967414f22 {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1393.044691] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773726, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.051266] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.164337} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.051559] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1393.052377] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53655875-5f81-402d-9427-af1163b45fdb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.087197] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 76c861a7-30f2-40f4-b723-7912975f36f8/76c861a7-30f2-40f4-b723-7912975f36f8.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1393.091602] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bce3409-9f16-467a-b01d-a8b73db16eb1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.115772] env[63371]: DEBUG oslo_vmware.api [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773728, 'name': PowerOffVM_Task, 'duration_secs': 0.413569} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.117135] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1393.118090] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1393.118090] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1393.118090] env[63371]: value = "task-1773729" [ 1393.118090] env[63371]: _type = "Task" [ 1393.118090] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.118090] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d8b2e74c-93a7-49ea-becb-0b736e661f7f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.127706] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773729, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.235330] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f385370-5907-4aee-9528-09899162db82 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.246726] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a49fc80-ce3e-46bb-97dd-0b4c1561f6b7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.284238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58d1835-5abb-42f1-8ab4-9c49a6d1728f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.286623] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1393.286843] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1393.287025] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Deleting the datastore file [datastore1] af1281ba-c3be-43b4-a039-86d94bd9efe4 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1393.287670] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b7fea87f-8389-4d59-a122-5e3c6da3edd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.297074] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032d6084-7b4a-4037-b6ed-b198a5943f22 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.300388] env[63371]: DEBUG oslo_vmware.api [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for the task: (returnval){ [ 1393.300388] env[63371]: value = "task-1773731" [ 1393.300388] env[63371]: _type = "Task" [ 
1393.300388] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.311840] env[63371]: DEBUG nova.compute.provider_tree [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1393.318240] env[63371]: DEBUG oslo_vmware.api [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773731, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.452048] env[63371]: INFO nova.compute.manager [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Took 49.06 seconds to build instance. [ 1393.540839] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773726, 'name': ReconfigVM_Task, 'duration_secs': 0.847165} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.541191] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Reconfigured VM instance instance-00000016 to attach disk [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1393.542086] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f6f5dd-c983-48e8-b247-8ef8df036233 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.578535] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9206c9a4-1e0d-4343-a05c-ca08386470c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1393.597759] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1393.597759] env[63371]: value = "task-1773732" [ 1393.597759] env[63371]: _type = "Task" [ 1393.597759] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1393.617938] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773732, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.631150] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773729, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1393.819293] env[63371]: DEBUG nova.scheduler.client.report [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1393.824320] env[63371]: DEBUG oslo_vmware.api [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Task: {'id': task-1773731, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.472805} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1393.825351] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1393.825351] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1393.825521] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1393.825819] env[63371]: INFO nova.compute.manager [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1393.826215] env[63371]: DEBUG oslo.service.loopingcall [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1393.826507] env[63371]: DEBUG nova.compute.manager [-] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1393.826662] env[63371]: DEBUG nova.network.neutron [-] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1393.855828] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1393.856313] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1393.955527] env[63371]: DEBUG oslo_concurrency.lockutils [None req-86c52f14-46ed-4600-b57d-5aa9ba9469e7 tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.802s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.118530] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773732, 'name': ReconfigVM_Task, 'duration_secs': 0.373144} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.119174] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1394.119393] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f96d445-119d-4b42-90f7-31fd5402757a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.131031] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773729, 'name': ReconfigVM_Task, 'duration_secs': 0.848276} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.132351] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 76c861a7-30f2-40f4-b723-7912975f36f8/76c861a7-30f2-40f4-b723-7912975f36f8.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1394.133017] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1394.133017] env[63371]: value = "task-1773733" [ 1394.133017] env[63371]: _type = "Task" [ 1394.133017] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.135148] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d7701c36-df31-46b0-959d-de01424d9c7c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.144170] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1394.144170] env[63371]: value = "task-1773734" [ 1394.144170] env[63371]: _type = "Task" [ 1394.144170] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.147272] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773733, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.157512] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773734, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.329026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.054s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.329026] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1394.330709] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 39.396s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.331104] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.331374] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1394.331760] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.128s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.334500] env[63371]: INFO nova.compute.claims [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1394.340179] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f04fe37-714d-4d0b-a4b6-9cb295a00cb5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.355713] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c19a01-5880-41b1-b7d0-8a1831835ae9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.374158] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188d0a22-2d45-42ad-b6e1-331fa9f5c4e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.383015] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd5a7aa-81a8-4026-ba08-530496da1f86 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.423066] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180028MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1394.423487] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.459265] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1394.610322] env[63371]: DEBUG nova.compute.manager [req-a2762b44-a772-4925-b555-bce3045ffce8 req-9658fccb-a4cd-4b7f-90ba-71dcc4c01641 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Received event network-vif-plugged-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1394.610692] env[63371]: DEBUG oslo_concurrency.lockutils [req-a2762b44-a772-4925-b555-bce3045ffce8 req-9658fccb-a4cd-4b7f-90ba-71dcc4c01641 service nova] Acquiring lock "e00c2e45-b8bc-440b-8b58-a21f127192c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.611049] env[63371]: DEBUG oslo_concurrency.lockutils [req-a2762b44-a772-4925-b555-bce3045ffce8 req-9658fccb-a4cd-4b7f-90ba-71dcc4c01641 service nova] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.611347] env[63371]: DEBUG oslo_concurrency.lockutils [req-a2762b44-a772-4925-b555-bce3045ffce8 req-9658fccb-a4cd-4b7f-90ba-71dcc4c01641 service nova] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.611916] env[63371]: DEBUG nova.compute.manager [req-a2762b44-a772-4925-b555-bce3045ffce8 req-9658fccb-a4cd-4b7f-90ba-71dcc4c01641 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] No waiting events found dispatching network-vif-plugged-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1394.611916] env[63371]: WARNING nova.compute.manager [req-a2762b44-a772-4925-b555-bce3045ffce8 req-9658fccb-a4cd-4b7f-90ba-71dcc4c01641 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Received unexpected event network-vif-plugged-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 for instance with vm_state building and task_state block_device_mapping. [ 1394.654804] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773733, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.655713] env[63371]: DEBUG nova.network.neutron [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Successfully updated port: 5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1394.663362] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773734, 'name': Rename_Task, 'duration_secs': 0.209442} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1394.663652] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1394.663911] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7b0e2c9-5baa-4609-9bd2-c9c534fc5a47 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.672280] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1394.672280] env[63371]: value = "task-1773735" [ 1394.672280] env[63371]: _type = "Task" [ 1394.672280] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.694076] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773735, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.833044] env[63371]: DEBUG nova.compute.utils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1394.834748] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1394.835202] env[63371]: DEBUG nova.network.neutron [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1394.850218] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.850567] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.851250] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1394.851624] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1394.851864] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1394.856242] env[63371]: INFO nova.compute.manager [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Terminating instance [ 1394.858098] env[63371]: DEBUG nova.compute.manager [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1394.858366] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1394.859436] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7e43e6-28a9-4b78-8859-5059c0cf120d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.867508] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1394.867752] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1d5a0933-4d6a-4724-bc9d-c8e0dad151c1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1394.873812] env[63371]: DEBUG oslo_vmware.api [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1394.873812] env[63371]: value = "task-1773736" [ 1394.873812] env[63371]: _type = "Task" [ 1394.873812] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1394.882344] env[63371]: DEBUG oslo_vmware.api [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773736, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1394.915900] env[63371]: DEBUG nova.policy [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a258c38635014fdf9c6e3907bda2fd03', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a884a9d1a3ae410b858851431c166183', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1394.990380] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1395.010959] env[63371]: DEBUG nova.network.neutron [-] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1395.152920] env[63371]: DEBUG oslo_vmware.api [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773733, 'name': PowerOnVM_Task, 'duration_secs': 0.566441} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.153205] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1395.155942] env[63371]: DEBUG nova.compute.manager [None req-4fe77cbb-cb11-4eab-a1cf-a20ff11435ee tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1395.156924] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc5769e-7f27-4136-8894-ed4f38a69a5a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.161509] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquiring lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1395.161641] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquired lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.161779] env[63371]: DEBUG nova.network.neutron [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1395.170620] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1395.171153] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1395.171360] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1395.172051] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1395.172051] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1395.172051] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1395.172051] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1395.173189] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1395.173573] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1395.173605] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Got 1 
possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1395.173757] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1395.173938] env[63371]: DEBUG nova.virt.hardware [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1395.180019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb65e65c-2239-4d10-a736-ad2e67e1106a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.194441] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56385f61-9e8b-4801-9590-c0c6e9f3ff27 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.198864] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773735, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.271070] env[63371]: DEBUG nova.network.neutron [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Successfully created port: 912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1395.339020] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1395.385428] env[63371]: DEBUG oslo_vmware.api [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773736, 'name': PowerOffVM_Task, 'duration_secs': 0.226973} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.388719] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1395.388896] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1395.389780] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-50efacd1-3794-44bd-8605-69e16e3db27b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.494655] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1395.495038] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1395.495106] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Deleting the datastore file [datastore1] 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1395.495322] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d92000a-3a15-4088-9f96-13c0fcb2121f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.501816] env[63371]: DEBUG oslo_vmware.api [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for the task: (returnval){ [ 1395.501816] env[63371]: value = "task-1773738" [ 1395.501816] env[63371]: _type = "Task" [ 1395.501816] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1395.513869] env[63371]: INFO nova.compute.manager [-] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Took 1.69 seconds to deallocate network for instance. [ 1395.514214] env[63371]: DEBUG oslo_vmware.api [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773738, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1395.696252] env[63371]: DEBUG oslo_vmware.api [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773735, 'name': PowerOnVM_Task, 'duration_secs': 0.675943} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1395.697037] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1395.697277] env[63371]: INFO nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Took 9.51 seconds to spawn the instance on the hypervisor. [ 1395.697466] env[63371]: DEBUG nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1395.698347] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e083cf3-5b26-4087-9754-b205a5aef8af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.773975] env[63371]: DEBUG nova.network.neutron [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1395.945237] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96ca776-363f-46c9-8ece-0568087bf7eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.952898] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a723c8-7492-46ef-8ad8-0aaedfbd76c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.989904] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8472344c-6910-4ad6-bbea-8fef60ad05fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.998117] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa8aa0f-763e-48d7-8a1c-ca5b21df1c36 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.021493] env[63371]: DEBUG nova.compute.provider_tree [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1396.028742] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1396.028742] env[63371]: DEBUG oslo_vmware.api [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Task: {'id': task-1773738, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.434547} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1396.028742] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1396.029104] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1396.029333] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1396.029538] env[63371]: INFO nova.compute.manager [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1396.029836] env[63371]: DEBUG oslo.service.loopingcall [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1396.030082] env[63371]: DEBUG nova.compute.manager [-] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1396.030306] env[63371]: DEBUG nova.network.neutron [-] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1396.112542] env[63371]: DEBUG nova.network.neutron [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updating instance_info_cache with network_info: [{"id": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "address": "fa:16:3e:97:82:d5", "network": {"id": "f378570d-e12e-4a4b-b779-b22a48508774", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1065947988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf723c611d61478cbb81b2bc474a74f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5279ae43-ba", "ovs_interfaceid": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1396.223375] env[63371]: INFO nova.compute.manager [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Took 51.63 seconds to build instance. [ 1396.347829] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1396.378757] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1396.378757] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1396.378757] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1396.378896] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1396.378896] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1396.378896] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1396.378896] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1396.379958] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1396.380299] 
env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1396.380617] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1396.380941] env[63371]: DEBUG nova.virt.hardware [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1396.384194] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d58743bf-3c4a-415d-8de8-40e173df6405 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.391220] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01113c4e-71c1-4b0d-a176-ddfef7373e9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.528603] env[63371]: DEBUG nova.scheduler.client.report [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1396.615265] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Releasing lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1396.615586] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Instance network_info: |[{"id": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "address": "fa:16:3e:97:82:d5", "network": {"id": "f378570d-e12e-4a4b-b779-b22a48508774", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1065947988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "bf723c611d61478cbb81b2bc474a74f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5279ae43-ba", "ovs_interfaceid": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1396.615998] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:82:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0cd5d325-3053-407e-a4ee-f627e82a23f9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1396.623810] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Creating folder: Project (bf723c611d61478cbb81b2bc474a74f4). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1396.624151] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-27128b87-b46b-4da2-ad96-3bb5cf05c617 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.639997] env[63371]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1396.640142] env[63371]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63371) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1396.640467] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Folder already exists: Project (bf723c611d61478cbb81b2bc474a74f4). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1396.640659] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Creating folder: Instances. Parent ref: group-v368212. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1396.641136] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb3f02c3-a97d-43d4-90d7-0b830b1cc23e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.651565] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Created folder: Instances in parent group-v368212. [ 1396.651794] env[63371]: DEBUG oslo.service.loopingcall [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1396.652423] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1396.652554] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49dd819b-4c8f-4095-9caf-8a287ab61859 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1396.676938] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1396.676938] env[63371]: value = "task-1773741" [ 1396.676938] env[63371]: _type = "Task" [ 1396.676938] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1396.687089] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773741, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1396.728917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-667c2090-ca15-4e05-b08c-b0ca527b1689 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "76c861a7-30f2-40f4-b723-7912975f36f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.623s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1396.835937] env[63371]: DEBUG nova.compute.manager [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Received event network-vif-deleted-62ec6b3f-aa36-49ba-ab5a-ce568c16837a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1396.835937] env[63371]: DEBUG nova.compute.manager [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Received event network-changed-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1396.835937] env[63371]: DEBUG nova.compute.manager [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Refreshing instance network info cache due to event network-changed-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1396.836155] env[63371]: DEBUG oslo_concurrency.lockutils [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] Acquiring lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1396.836155] env[63371]: DEBUG oslo_concurrency.lockutils [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] Acquired lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1396.836285] env[63371]: DEBUG nova.network.neutron [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Refreshing network info cache for port 5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1396.984890] env[63371]: DEBUG nova.network.neutron [-] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.033717] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.702s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1397.034273] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1397.038318] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.797s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1397.038318] env[63371]: DEBUG nova.objects.instance [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lazy-loading 'resources' on Instance uuid 362d8303-524a-457a-b8d9-2bad87fa816b {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1397.187741] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773741, 'name': CreateVM_Task, 'duration_secs': 0.352925} completed successfully. 
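Every vCenter operation in these records follows the same shape: a *_Task method is invoked, oslo.vmware hands back a task reference, and _poll_task logs "progress is N%" until the task completes (the CreateVM_Task above finished in roughly 0.35 s). A hedged sketch of that invoke-and-wait loop, assuming an already established oslo_vmware.api.VMwareAPISession and illustrative argument names:

```python
# Sketch only: invoke a vSphere task method and block until it finishes.
# wait_for_task() polls the task (the "progress is N%" lines above) and
# raises if the task ends in an error state.
def create_vm(session, folder_ref, config_spec, res_pool_ref):
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=res_pool_ref)
    task_info = session.wait_for_task(task)
    return task_info.result  # managed object reference of the new VM
```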
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.187922] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1397.188707] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'disk_bus': None, 'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368225', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'name': 'volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e00c2e45-b8bc-440b-8b58-a21f127192c7', 'attached_at': '', 'detached_at': '', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'serial': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8'}, 'boot_index': 0, 'device_type': None, 'attachment_id': 'd0b4bf9b-f6ef-410a-a228-58c967414f22', 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=63371) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1397.188927] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Root volume attach. 
Driver type: vmdk {{(pid=63371) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1397.191095] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc58ac26-0cb6-488a-adf3-29714dc48543 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.198912] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a37eb0af-4a68-447b-8457-bb013ff6186a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.205799] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c699f87-2954-4c27-bf90-3a71094e644b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.214552] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-cb8b4669-50de-4b50-b325-6077d93cd823 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.220425] env[63371]: DEBUG nova.network.neutron [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Successfully updated port: 912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1397.227480] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1397.227480] env[63371]: value = "task-1773742" [ 1397.227480] env[63371]: _type = "Task" [ 1397.227480] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.232220] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1397.237882] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773742, 'name': RelocateVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.290647] env[63371]: DEBUG nova.compute.manager [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1397.290880] env[63371]: DEBUG nova.compute.manager [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing instance network info cache due to event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1397.291201] env[63371]: DEBUG oslo_concurrency.lockutils [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.291273] env[63371]: DEBUG oslo_concurrency.lockutils [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.291804] env[63371]: DEBUG nova.network.neutron [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1397.484596] env[63371]: INFO nova.compute.manager [-] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Took 1.45 seconds to deallocate network for instance. [ 1397.541098] env[63371]: DEBUG nova.compute.utils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1397.549604] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1397.549817] env[63371]: DEBUG nova.network.neutron [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1397.668900] env[63371]: DEBUG nova.policy [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5b3af3bbd35846198784331994497179', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '857815a7f15648948bb4ca862473ed06', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1397.724991] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "refresh_cache-dc6ef0a7-1744-4b90-b385-913cb796f7d0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1397.724991] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired lock "refresh_cache-dc6ef0a7-1744-4b90-b385-913cb796f7d0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.724991] env[63371]: DEBUG nova.network.neutron [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1397.738935] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773742, 'name': RelocateVM_Task, 'duration_secs': 0.027045} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.742279] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Volume attach. 
Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1397.742518] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368225', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'name': 'volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e00c2e45-b8bc-440b-8b58-a21f127192c7', 'attached_at': '', 'detached_at': '', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'serial': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1397.746258] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3866246e-3235-4870-a4c0-3ef8d07a24bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.751622] env[63371]: DEBUG nova.network.neutron [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updated VIF entry in instance network info cache for port 5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1397.752931] env[63371]: DEBUG nova.network.neutron [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updating instance_info_cache with network_info: [{"id": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "address": "fa:16:3e:97:82:d5", "network": {"id": "f378570d-e12e-4a4b-b779-b22a48508774", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1065947988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf723c611d61478cbb81b2bc474a74f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5279ae43-ba", "ovs_interfaceid": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.770324] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1397.774640] env[63371]: DEBUG oslo_concurrency.lockutils [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] Releasing lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1397.774810] env[63371]: DEBUG nova.compute.manager [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Received event network-vif-deleted-00f821d3-2f0a-46f0-9551-f7eefb581c66 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1397.775145] env[63371]: INFO nova.compute.manager [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Neutron deleted interface 00f821d3-2f0a-46f0-9551-f7eefb581c66; detaching it from the instance and deleting it from the info cache [ 1397.775529] env[63371]: DEBUG nova.network.neutron [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.781027] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31ba630-e355-4e9a-beca-d3383150fbe5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.806772] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8/volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1397.809951] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23e8b310-9928-4bc5-b669-2e498ced8a06 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.832701] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1397.832701] env[63371]: value = "task-1773743" [ 1397.832701] env[63371]: _type = "Task" [ 1397.832701] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.844367] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773743, 'name': ReconfigVM_Task} progress is 6%. 
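The boot-from-volume path above attaches the Cinder volume by reconfiguring instance-0000001a with an extra VirtualDisk whose backing points at the volume's VMDK ("attach disk [datastore1] volume-... with type thin"). A rough sketch of the kind of ReconfigVM_Task spec being built here, using standard vSphere API type and field names rather than code copied from Nova:

```python
# Sketch: attach an existing thin-provisioned VMDK to a VM via ReconfigVM_Task.
def attach_vmdk(session, vm_ref, vmdk_path, controller_key, unit_number):
    cf = session.vim.client.factory

    backing = cf.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.fileName = vmdk_path      # e.g. "[datastore1] volume-.../volume-....vmdk"
    backing.diskMode = 'persistent'
    backing.thinProvisioned = True    # "with type thin" in the record above

    disk = cf.create('ns0:VirtualDisk')
    disk.backing = backing
    disk.controllerKey = controller_key
    disk.unitNumber = unit_number
    disk.key = -100                   # negative key: vCenter assigns the real one

    device_change = cf.create('ns0:VirtualDeviceConfigSpec')
    device_change.operation = 'add'
    device_change.device = disk

    config_spec = cf.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [device_change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=config_spec)
    session.wait_for_task(task)
```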
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.993536] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1398.052547] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1398.283529] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-567050ff-b558-4352-b40b-df495b0ff71c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.291277] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde881d4-98b4-4071-b6c1-e89c589474b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.329457] env[63371]: DEBUG nova.compute.manager [req-e995e0ba-3868-4889-97ec-43e48f2a984f req-4521d05b-d771-4969-9b13-9e0344a2a495 service nova] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Detach interface failed, port_id=00f821d3-2f0a-46f0-9551-f7eefb581c66, reason: Instance 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1398.331449] env[63371]: DEBUG nova.network.neutron [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1398.341582] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773743, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.444767] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bac1765-7ac1-4339-b1c2-26a5f6343e79 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.452607] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e005f6-3c46-44bb-acb4-83cbc4fbbb88 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.482568] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b94bc5-0784-4bd5-8114-a7bd3cbf5e1d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.490299] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a03e7b5-2aad-4056-bda8-d370718b9d47 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.503452] env[63371]: DEBUG nova.compute.provider_tree [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1398.536353] env[63371]: DEBUG nova.network.neutron [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updated VIF entry in instance network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. 
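The network_info blobs dumped by these instance_info_cache updates are plain JSON lists, one entry per VIF, which makes them easy to mine when debugging a run like this. A small self-contained helper (illustrative only, using the field names visible in the surrounding records) that pulls out the port, MAC, fixed IPs and NSX segmentation ID from such a blob:

```python
import json

def summarize_network_info(network_info_json):
    """Summarize a logged Nova network_info dump like the ones above."""
    vifs = json.loads(network_info_json)
    summary = []
    for vif in vifs:
        ips = [ip['address']
               for subnet in vif['network']['subnets']
               for ip in subnet['ips']]
        summary.append({
            'port_id': vif['id'],
            'mac': vif['address'],
            'fixed_ips': ips,
            'segmentation_id': vif.get('details', {}).get('segmentation_id'),
            'devname': vif.get('devname'),
        })
    return summary

# For the e00c2e45-... entry above this would yield, e.g.:
# [{'port_id': '5279ae43-...', 'mac': 'fa:16:3e:97:82:d5',
#   'fixed_ips': ['192.168.128.3'], 'segmentation_id': 809,
#   'devname': 'tap5279ae43-ba'}]
```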
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1398.536733] env[63371]: DEBUG nova.network.neutron [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.672892] env[63371]: DEBUG nova.network.neutron [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Successfully created port: 386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1398.760115] env[63371]: DEBUG nova.network.neutron [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Updating instance_info_cache with network_info: [{"id": "912c6f7c-cc28-4f29-a362-7a8079dcc422", "address": "fa:16:3e:1e:13:48", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.18", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap912c6f7c-cc", "ovs_interfaceid": "912c6f7c-cc28-4f29-a362-7a8079dcc422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.843873] 
env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773743, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.006790] env[63371]: DEBUG nova.scheduler.client.report [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1399.039743] env[63371]: DEBUG oslo_concurrency.lockutils [req-c13ba04e-eb99-4df1-abe5-f0326b5c7430 req-0f33630e-b465-43da-81ed-738f77334536 service nova] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.061628] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1399.093406] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1399.093656] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1399.093809] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
1399.093993] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1399.094151] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1399.094298] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1399.094911] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1399.095044] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1399.095223] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1399.095417] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1399.095588] env[63371]: DEBUG nova.virt.hardware [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1399.096790] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09017ddb-5e06-43dd-93a9-133b2ab4cf09 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.107145] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c2c6e1-5b21-45c1-acbe-3cdaa9a8b951 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.154049] env[63371]: DEBUG nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: 
dc6ef0a7-1744-4b90-b385-913cb796f7d0] Received event network-vif-plugged-912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1399.154292] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Acquiring lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1399.154497] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.154662] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.154871] env[63371]: DEBUG nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] No waiting events found dispatching network-vif-plugged-912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1399.154977] env[63371]: WARNING nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Received unexpected event network-vif-plugged-912c6f7c-cc28-4f29-a362-7a8079dcc422 for instance with vm_state building and task_state spawning. [ 1399.155141] env[63371]: DEBUG nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Received event network-changed-912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1399.155313] env[63371]: DEBUG nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Refreshing instance network info cache due to event network-changed-912c6f7c-cc28-4f29-a362-7a8079dcc422. 
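Each "Acquiring lock … / Lock … acquired … waited N s / … released … held N s" triplet in these records comes from oslo.concurrency guarding a critical section (the event queue, the refresh_cache-<uuid> cache, the compute_resources tracker). A minimal sketch of the two usage forms visible here, decorator and context manager, assuming nothing beyond the public oslo_concurrency.lockutils API and hypothetical function names:

```python
from oslo_concurrency import lockutils

# Context-manager form: serializes work on one instance's network info cache,
# producing the "refresh_cache-<uuid>" acquire/release lines above.
def refresh_cache(instance_uuid, refresh_fn):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        return refresh_fn(instance_uuid)

# Decorator form: serializes resource-tracker updates, producing the
# "compute_resources" lock lines with their waited/held timings.
@lockutils.synchronized('compute_resources')
def update_usage(tracker, instance):
    tracker.update(instance)
```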
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1399.155516] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Acquiring lock "refresh_cache-dc6ef0a7-1744-4b90-b385-913cb796f7d0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.262117] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Releasing lock "refresh_cache-dc6ef0a7-1744-4b90-b385-913cb796f7d0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1399.262462] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Instance network_info: |[{"id": "912c6f7c-cc28-4f29-a362-7a8079dcc422", "address": "fa:16:3e:1e:13:48", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.18", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap912c6f7c-cc", "ovs_interfaceid": "912c6f7c-cc28-4f29-a362-7a8079dcc422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1399.262774] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Acquired lock "refresh_cache-dc6ef0a7-1744-4b90-b385-913cb796f7d0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.262948] env[63371]: DEBUG nova.network.neutron [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Refreshing network info cache for port 912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1399.264250] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:13:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '912c6f7c-cc28-4f29-a362-7a8079dcc422', 
'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1399.273300] env[63371]: DEBUG oslo.service.loopingcall [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1399.276126] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1399.276607] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-669c0fb5-b049-4b36-bfb6-01ad3d21871f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.299208] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1399.299208] env[63371]: value = "task-1773744" [ 1399.299208] env[63371]: _type = "Task" [ 1399.299208] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.307701] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773744, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.343819] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773743, 'name': ReconfigVM_Task, 'duration_secs': 1.282759} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.344160] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Reconfigured VM instance instance-0000001a to attach disk [datastore1] volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8/volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1399.349305] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf866f5c-950b-45b5-91b6-5fb03379e671 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.367552] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1399.367552] env[63371]: value = "task-1773745" [ 1399.367552] env[63371]: _type = "Task" [ 1399.367552] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.376247] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773745, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.512203] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.475s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1399.514737] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.816s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1399.515034] env[63371]: DEBUG nova.objects.instance [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63371) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1399.547375] env[63371]: INFO nova.scheduler.client.report [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Deleted allocations for instance 362d8303-524a-457a-b8d9-2bad87fa816b [ 1399.563741] env[63371]: DEBUG nova.network.neutron [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Updated VIF entry in instance network info cache for port 912c6f7c-cc28-4f29-a362-7a8079dcc422. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1399.564130] env[63371]: DEBUG nova.network.neutron [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Updating instance_info_cache with network_info: [{"id": "912c6f7c-cc28-4f29-a362-7a8079dcc422", "address": "fa:16:3e:1e:13:48", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.18", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap912c6f7c-cc", "ovs_interfaceid": "912c6f7c-cc28-4f29-a362-7a8079dcc422", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.810404] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773744, 'name': CreateVM_Task, 'duration_secs': 0.339631} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.810580] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1399.811288] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1399.811491] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1399.811761] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1399.812206] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b90f8ed-3676-465d-b312-9b9ebbd68d6d {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.816846] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1399.816846] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cad284-5c00-1acd-a5fb-0db4891dfe83" [ 1399.816846] env[63371]: _type = "Task" [ 1399.816846] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.826606] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cad284-5c00-1acd-a5fb-0db4891dfe83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.877163] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773745, 'name': ReconfigVM_Task, 'duration_secs': 0.140544} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.877481] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368225', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'name': 'volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e00c2e45-b8bc-440b-8b58-a21f127192c7', 'attached_at': '', 'detached_at': '', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'serial': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1399.878149] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d3f2dd5c-e08c-4e66-86ab-016df7588bed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.885275] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1399.885275] env[63371]: value = "task-1773746" [ 1399.885275] env[63371]: _type = "Task" [ 1399.885275] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.893921] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773746, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.056156] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77a9f084-5691-40dc-9961-8124f3505328 tempest-TenantUsagesTestJSON-121194719 tempest-TenantUsagesTestJSON-121194719-project-member] Lock "362d8303-524a-457a-b8d9-2bad87fa816b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.757s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.066346] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Releasing lock "refresh_cache-dc6ef0a7-1744-4b90-b385-913cb796f7d0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.066618] env[63371]: DEBUG nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1400.066992] env[63371]: DEBUG nova.compute.manager [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing instance network info cache due to event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1400.066992] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.067105] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.067267] env[63371]: DEBUG nova.network.neutron [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1400.162608] env[63371]: DEBUG nova.compute.manager [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1400.162840] env[63371]: DEBUG nova.compute.manager [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing instance network info cache due to event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1400.162973] env[63371]: DEBUG oslo_concurrency.lockutils [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.333287] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cad284-5c00-1acd-a5fb-0db4891dfe83, 'name': SearchDatastore_Task, 'duration_secs': 0.008709} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.333932] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1400.333932] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1400.334438] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1400.334438] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1400.334438] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1400.334908] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23436697-ad0a-4a04-b99f-817bef54dd3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.353170] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1400.353225] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1400.353978] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97f549f1-f850-4ebe-ad4b-ed85a96cd9b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.359483] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1400.359483] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52e1e7e5-0eee-7424-88e2-c462fe387003" [ 1400.359483] env[63371]: _type = "Task" [ 1400.359483] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.367556] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e1e7e5-0eee-7424-88e2-c462fe387003, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.394828] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773746, 'name': Rename_Task, 'duration_secs': 0.129224} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.395160] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1400.397231] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dfeccbff-4dee-499e-b9da-7e2c75804b59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.404033] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1400.404033] env[63371]: value = "task-1773747" [ 1400.404033] env[63371]: _type = "Task" [ 1400.404033] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.410313] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773747, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.513391] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "b523486c-adae-4322-80be-1f3bf33ca192" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1400.513820] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.530027] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02f0ff47-afb3-4b54-8f66-7d610e98787c tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1400.530027] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.293s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1400.530027] env[63371]: DEBUG nova.objects.instance [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lazy-loading 'resources' on Instance uuid 7841ebd2-0c23-4e32-8b81-42311a32c6fd {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1400.790051] env[63371]: DEBUG nova.network.neutron [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Successfully updated port: 386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1400.873416] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e1e7e5-0eee-7424-88e2-c462fe387003, 'name': SearchDatastore_Task, 'duration_secs': 0.019746} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.874218] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34855aef-3b9b-4346-928b-42cc05b08f59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.880037] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1400.880037] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522b095f-2297-e222-17a7-9d3f39341f6b" [ 1400.880037] env[63371]: _type = "Task" [ 1400.880037] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.888000] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522b095f-2297-e222-17a7-9d3f39341f6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.914761] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773747, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.054504] env[63371]: DEBUG nova.network.neutron [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updated VIF entry in instance network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1401.055447] env[63371]: DEBUG nova.network.neutron [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.292878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "refresh_cache-852e14a7-2f9f-421c-9804-56c885885c7d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.293149] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired lock "refresh_cache-852e14a7-2f9f-421c-9804-56c885885c7d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.293196] env[63371]: DEBUG nova.network.neutron [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1401.394262] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522b095f-2297-e222-17a7-9d3f39341f6b, 'name': SearchDatastore_Task, 'duration_secs': 0.010686} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.394516] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.395065] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] dc6ef0a7-1744-4b90-b385-913cb796f7d0/dc6ef0a7-1744-4b90-b385-913cb796f7d0.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1401.397361] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fa57a445-ca87-4355-91b5-cf3f9cff2fc9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.403420] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1401.403420] env[63371]: value = "task-1773748" [ 1401.403420] env[63371]: _type = "Task" [ 1401.403420] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.420343] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773748, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.420600] env[63371]: DEBUG oslo_vmware.api [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773747, 'name': PowerOnVM_Task, 'duration_secs': 0.641327} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.420884] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1401.421209] env[63371]: INFO nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Took 6.25 seconds to spawn the instance on the hypervisor. 
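[Editor's note] The CopyVirtualDisk_Task and PowerOnVM_Task entries above follow oslo.vmware's submit-then-poll pattern: the driver invokes the vSphere method through the API session and then blocks in wait_for_task, which produces the "Waiting for the task ...", "progress is N%" and "completed successfully" lines seen in this log. A minimal sketch of that pattern is below; the endpoint, credentials and the power_on helper are hypothetical and are not taken from this log, and this is not Nova's actual vmops code.

    from oslo_vmware import api as vmware_api

    # Hypothetical vCenter endpoint and credentials (placeholders only).
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5, port=443)

    def power_on(session, vm_ref):
        """Submit PowerOnVM_Task for a VM managed-object ref and block on it.

        invoke_api() proxies the SOAP call through the session; wait_for_task()
        polls the task state at task_poll_interval, logging the progress lines
        seen above, and raises if the task ends in error.
        """
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)

The "Took 6.25 seconds to spawn" figure above is simply the elapsed wall-clock time across these polled tasks, not a separate timer.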
[ 1401.421917] env[63371]: DEBUG nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1401.422294] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26bafd88-d5aa-4456-b8c7-8180a2628e97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.508967] env[63371]: DEBUG nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Received event network-vif-plugged-386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1401.509213] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Acquiring lock "852e14a7-2f9f-421c-9804-56c885885c7d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1401.509457] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Lock "852e14a7-2f9f-421c-9804-56c885885c7d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1401.509635] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Lock "852e14a7-2f9f-421c-9804-56c885885c7d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1401.509868] env[63371]: DEBUG nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] No waiting events found dispatching network-vif-plugged-386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1401.510051] env[63371]: WARNING nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Received unexpected event network-vif-plugged-386f3dc5-c792-4979-a938-7ec61bb88563 for instance with vm_state building and task_state spawning. 
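[Editor's note] The paired "Acquiring lock ... by ...", "Lock ... acquired ... waited N s" and "Lock ... \"released\" ... held N s" entries (here around the per-instance "-events" lock and the "compute_resources" lock) come from oslo.concurrency's named-lock helpers; the lockutils.py:402/407/421 references are the synchronized decorator's inner wrapper, and 310/313/331 are the plain lock context manager. A minimal sketch of both forms, with a hypothetical lock name and body (not Nova's actual event-dispatch code):

    from oslo_concurrency import lockutils

    # Decorator form: the inner wrapper logs the acquire/waited/held lines.
    @lockutils.synchronized('example-instance-uuid-events')  # hypothetical name
    def _pop_event():
        # Critical section runs with the named semaphore held.
        return None

    # Context-manager form, matching the "refresh_cache-<uuid>" locks above.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the instance network info cache here

The long "waited 34.293s" value logged for "compute_resources" shows these are process-wide fair locks: a caller queues behind whichever greenthread currently holds the same name.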
[ 1401.510215] env[63371]: DEBUG nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Received event network-changed-386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1401.510383] env[63371]: DEBUG nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Refreshing instance network info cache due to event network-changed-386f3dc5-c792-4979-a938-7ec61bb88563. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1401.510526] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Acquiring lock "refresh_cache-852e14a7-2f9f-421c-9804-56c885885c7d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1401.558165] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a55c029-cdee-4700-9119-7d8062752a46 req-11189354-efb3-496c-8527-78236214f81f service nova] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1401.564044] env[63371]: DEBUG oslo_concurrency.lockutils [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1401.564340] env[63371]: DEBUG nova.network.neutron [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1401.653416] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f16bf3-75bd-48af-b1a6-0317c941cf36 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.661290] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdb660a-8cf5-415c-97e0-0ea853eeb764 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.701387] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afd27ce-f7aa-4b86-aa8a-dea97c650818 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.709597] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d826a3-c9d2-45b2-89a3-3563bf418e24 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.726030] env[63371]: DEBUG nova.compute.provider_tree [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1401.858395] env[63371]: DEBUG nova.network.neutron [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1401.919612] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773748, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.945380] env[63371]: INFO nova.compute.manager [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Took 48.95 seconds to build instance. [ 1402.113396] env[63371]: DEBUG nova.network.neutron [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Updating instance_info_cache with network_info: [{"id": "386f3dc5-c792-4979-a938-7ec61bb88563", "address": "fa:16:3e:9c:b0:15", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.98", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap386f3dc5-c7", "ovs_interfaceid": "386f3dc5-c792-4979-a938-7ec61bb88563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.228973] env[63371]: DEBUG nova.scheduler.client.report [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1402.363258] env[63371]: DEBUG nova.network.neutron [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] 
[instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updated VIF entry in instance network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1402.363578] env[63371]: DEBUG nova.network.neutron [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1402.418669] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773748, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.727668} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.418669] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] dc6ef0a7-1744-4b90-b385-913cb796f7d0/dc6ef0a7-1744-4b90-b385-913cb796f7d0.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1402.418669] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1402.418669] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b52f02a7-7fc1-47dc-b242-b27004a6e155 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.425235] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1402.425235] env[63371]: value = "task-1773749" [ 1402.425235] env[63371]: _type = "Task" [ 1402.425235] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.436449] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773749, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.447380] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c72ef1f0-e2b7-4540-9d6e-fab5f6be90a6 tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.093s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.615997] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Releasing lock "refresh_cache-852e14a7-2f9f-421c-9804-56c885885c7d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.616887] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Instance network_info: |[{"id": "386f3dc5-c792-4979-a938-7ec61bb88563", "address": "fa:16:3e:9c:b0:15", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.98", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap386f3dc5-c7", "ovs_interfaceid": "386f3dc5-c792-4979-a938-7ec61bb88563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1402.616887] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Acquired lock "refresh_cache-852e14a7-2f9f-421c-9804-56c885885c7d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1402.617328] env[63371]: DEBUG nova.network.neutron [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Refreshing network info cache for port 386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1402.618116] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:b0:15', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '386f3dc5-c792-4979-a938-7ec61bb88563', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1402.629400] env[63371]: DEBUG oslo.service.loopingcall [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1402.630590] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1402.630974] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-167eaf6e-f73b-458a-a8c4-1ca908284347 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.654523] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1402.654523] env[63371]: value = "task-1773750" [ 1402.654523] env[63371]: _type = "Task" [ 1402.654523] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1402.664695] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773750, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.733931] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.204s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1402.736681] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.902s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1402.736875] env[63371]: DEBUG nova.objects.instance [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lazy-loading 'resources' on Instance uuid 1924d3d2-cc88-4fd2-b509-8463da796658 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1402.762961] env[63371]: INFO nova.scheduler.client.report [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Deleted allocations for instance 7841ebd2-0c23-4e32-8b81-42311a32c6fd [ 1402.867095] env[63371]: DEBUG oslo_concurrency.lockutils [req-584cbf26-cc1f-4a14-b2be-dc3fa55db1c4 req-9e1b2924-be81-4751-a179-b6cc31b10c62 service nova] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1402.935931] env[63371]: DEBUG 
oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773749, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06576} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1402.936239] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1402.937091] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d81178-b34c-4e94-aa36-bccedb99b43c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.957019] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1402.967294] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] dc6ef0a7-1744-4b90-b385-913cb796f7d0/dc6ef0a7-1744-4b90-b385-913cb796f7d0.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1402.968115] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-019d737d-4738-42a5-ab88-12548556e714 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.990863] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1402.990863] env[63371]: value = "task-1773751" [ 1402.990863] env[63371]: _type = "Task" [ 1402.990863] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.002238] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773751, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.054049] env[63371]: DEBUG nova.compute.manager [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Received event network-changed-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1403.054251] env[63371]: DEBUG nova.compute.manager [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Refreshing instance network info cache due to event network-changed-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1403.054484] env[63371]: DEBUG oslo_concurrency.lockutils [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] Acquiring lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.054687] env[63371]: DEBUG oslo_concurrency.lockutils [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] Acquired lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.054821] env[63371]: DEBUG nova.network.neutron [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Refreshing network info cache for port 5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1403.165311] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773750, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.274503] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d7d2e09-b889-4dd5-a2cc-da189e8dd7ac tempest-ServerExternalEventsTest-234985771 tempest-ServerExternalEventsTest-234985771-project-member] Lock "7841ebd2-0c23-4e32-8b81-42311a32c6fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.226s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.369183] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.369433] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.370657] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.370657] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1403.370657] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1403.372623] env[63371]: INFO nova.compute.manager [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Terminating instance [ 1403.374398] env[63371]: DEBUG nova.compute.manager [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1403.374585] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1403.375545] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de97c9dc-881f-4d11-8ed3-5b401457ce12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.388457] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1403.388755] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99ad1241-acaa-4b9f-9084-b27fe7e03adc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.405789] env[63371]: DEBUG oslo_vmware.api [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1403.405789] env[63371]: value = "task-1773752" [ 1403.405789] env[63371]: _type = "Task" [ 1403.405789] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.417742] env[63371]: DEBUG oslo_vmware.api [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773752, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.442562] env[63371]: DEBUG nova.network.neutron [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Updated VIF entry in instance network info cache for port 386f3dc5-c792-4979-a938-7ec61bb88563. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1403.442955] env[63371]: DEBUG nova.network.neutron [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Updating instance_info_cache with network_info: [{"id": "386f3dc5-c792-4979-a938-7ec61bb88563", "address": "fa:16:3e:9c:b0:15", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.98", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap386f3dc5-c7", "ovs_interfaceid": "386f3dc5-c792-4979-a938-7ec61bb88563", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.488970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1403.511703] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773751, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.665103] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773750, 'name': CreateVM_Task, 'duration_secs': 0.883692} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.665271] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1403.665976] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.666140] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.666462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1403.666711] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9adfa1f0-5fac-4324-9f00-4319d3496f4e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.675515] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1403.675515] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5239ca2d-d2d9-6ca8-3567-07f1561c74dd" [ 1403.675515] env[63371]: _type = "Task" [ 1403.675515] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1403.686802] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5239ca2d-d2d9-6ca8-3567-07f1561c74dd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1403.859159] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46902a3d-094f-4384-8376-d764f7c27789 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.866623] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84a733ad-45e8-4ad8-b41a-e550d56cafbb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.902265] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a034e9f5-eba7-4e34-bed1-be84e2161294 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.912968] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc7a724-ecf0-4bbe-b24f-6366090f9e53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.919631] env[63371]: DEBUG oslo_vmware.api [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773752, 'name': PowerOffVM_Task, 'duration_secs': 0.241018} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1403.920267] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1403.920443] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1403.920685] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e5a5734e-3efa-4f4d-b762-4f70cbb9ac9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1403.932524] env[63371]: DEBUG nova.compute.provider_tree [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1403.951100] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Releasing lock "refresh_cache-852e14a7-2f9f-421c-9804-56c885885c7d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1403.951100] env[63371]: DEBUG nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event 
network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1403.951100] env[63371]: DEBUG nova.compute.manager [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing instance network info cache due to event network-changed-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1403.951100] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Acquiring lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1403.951100] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Acquired lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.951284] env[63371]: DEBUG nova.network.neutron [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Refreshing network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1404.004489] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773751, 'name': ReconfigVM_Task, 'duration_secs': 0.600375} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.004755] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Reconfigured VM instance instance-0000001b to attach disk [datastore1] dc6ef0a7-1744-4b90-b385-913cb796f7d0/dc6ef0a7-1744-4b90-b385-913cb796f7d0.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1404.005471] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8866e1c2-6fe3-4ce3-a2fb-c64c19c878a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.011441] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1404.011441] env[63371]: value = "task-1773754" [ 1404.011441] env[63371]: _type = "Task" [ 1404.011441] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.022364] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773754, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.025124] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1404.025319] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1404.025487] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Deleting the datastore file [datastore1] 713dfaf5-d11f-4af2-af92-66a596b0ed4a {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1404.025732] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3d000c4d-125c-4da1-a430-763e77980b59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.033588] env[63371]: DEBUG oslo_vmware.api [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for the task: (returnval){ [ 1404.033588] env[63371]: value = "task-1773755" [ 1404.033588] env[63371]: _type = "Task" [ 1404.033588] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.041768] env[63371]: DEBUG oslo_vmware.api [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773755, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.051389] env[63371]: DEBUG nova.network.neutron [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updated VIF entry in instance network info cache for port 5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1404.051780] env[63371]: DEBUG nova.network.neutron [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updating instance_info_cache with network_info: [{"id": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "address": "fa:16:3e:97:82:d5", "network": {"id": "f378570d-e12e-4a4b-b779-b22a48508774", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1065947988-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "bf723c611d61478cbb81b2bc474a74f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0cd5d325-3053-407e-a4ee-f627e82a23f9", "external-id": "nsx-vlan-transportzone-809", "segmentation_id": 809, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5279ae43-ba", "ovs_interfaceid": "5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.185812] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5239ca2d-d2d9-6ca8-3567-07f1561c74dd, 'name': SearchDatastore_Task, 'duration_secs': 0.010238} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.186141] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.186418] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1404.186697] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1404.186945] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1404.187203] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1404.187502] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dab7ee3d-b91b-4348-ad0f-cb25248cb9dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.195266] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1404.195493] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1404.196261] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cb5e2ee-8f88-4829-8930-409439b02c71 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.202524] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1404.202524] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d8c8bc-f197-5db4-d5dd-ba8af7432e5e" [ 1404.202524] env[63371]: _type = "Task" [ 1404.202524] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.210474] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d8c8bc-f197-5db4-d5dd-ba8af7432e5e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.436408] env[63371]: DEBUG nova.scheduler.client.report [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1404.522830] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773754, 'name': Rename_Task, 'duration_secs': 0.177066} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.523144] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1404.523704] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b7e4ac8-d738-49cf-9d14-b58c1cf831ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.531068] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1404.531068] env[63371]: value = "task-1773756" [ 1404.531068] env[63371]: _type = "Task" [ 1404.531068] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.541958] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773756, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.544980] env[63371]: DEBUG oslo_vmware.api [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Task: {'id': task-1773755, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231739} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.547270] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1404.548392] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1404.548392] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1404.548392] env[63371]: INFO nova.compute.manager [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1404.548392] env[63371]: DEBUG oslo.service.loopingcall [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1404.548392] env[63371]: DEBUG nova.compute.manager [-] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1404.548392] env[63371]: DEBUG nova.network.neutron [-] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1404.554675] env[63371]: DEBUG oslo_concurrency.lockutils [req-b92ccda4-a77d-4e79-b005-9a2a5019ae53 req-7b5baec6-72f2-49f0-b7d9-6d934933f27e service nova] Releasing lock "refresh_cache-e00c2e45-b8bc-440b-8b58-a21f127192c7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1404.716202] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d8c8bc-f197-5db4-d5dd-ba8af7432e5e, 'name': SearchDatastore_Task, 'duration_secs': 0.009338} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1404.719099] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ce27633-457a-4907-bac2-b08970b83015 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1404.730365] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1404.730365] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bca30f-1ec6-a5b9-a467-276c963c3994" [ 1404.730365] env[63371]: _type = "Task" [ 1404.730365] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1404.738617] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bca30f-1ec6-a5b9-a467-276c963c3994, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1404.789956] env[63371]: DEBUG nova.network.neutron [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updated VIF entry in instance network info cache for port fbd3a7d0-068b-4df5-be7f-d8bf5fe260de. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1404.789956] env[63371]: DEBUG nova.network.neutron [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [{"id": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "address": "fa:16:3e:fe:18:22", "network": {"id": "979a4c7c-eddc-4743-ad37-5bc8815a6cdb", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1764851353-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "8126cc358323499680ab7423d7b6ce0d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b2ede0e6-8d7a-4018-bb37-25bf388e9867", "external-id": "nsx-vlan-transportzone-945", "segmentation_id": 945, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbd3a7d0-06", "ovs_interfaceid": "fbd3a7d0-068b-4df5-be7f-d8bf5fe260de", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.941284] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.205s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1404.944375] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.343s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1404.946912] env[63371]: INFO nova.compute.claims [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1404.968819] env[63371]: INFO nova.scheduler.client.report [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleted allocations for instance 1924d3d2-cc88-4fd2-b509-8463da796658 [ 1404.992524] env[63371]: DEBUG nova.compute.manager [req-55631f43-f246-46b2-8642-c5ae1b1db542 req-fa07694c-f1c0-4d7a-b451-7cfa63d8c63e service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Received event network-vif-deleted-fbd3a7d0-068b-4df5-be7f-d8bf5fe260de {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1404.992721] env[63371]: INFO nova.compute.manager [req-55631f43-f246-46b2-8642-c5ae1b1db542 req-fa07694c-f1c0-4d7a-b451-7cfa63d8c63e service 
nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Neutron deleted interface fbd3a7d0-068b-4df5-be7f-d8bf5fe260de; detaching it from the instance and deleting it from the info cache [ 1404.992885] env[63371]: DEBUG nova.network.neutron [req-55631f43-f246-46b2-8642-c5ae1b1db542 req-fa07694c-f1c0-4d7a-b451-7cfa63d8c63e service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.043223] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773756, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.241349] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bca30f-1ec6-a5b9-a467-276c963c3994, 'name': SearchDatastore_Task, 'duration_secs': 0.008677} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.241616] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.241868] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 852e14a7-2f9f-421c-9804-56c885885c7d/852e14a7-2f9f-421c-9804-56c885885c7d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1405.242143] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e325ce6f-a7f4-4f1f-9a0e-5a45abb0f279 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.248923] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1405.248923] env[63371]: value = "task-1773757" [ 1405.248923] env[63371]: _type = "Task" [ 1405.248923] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1405.256993] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773757, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.295780] env[63371]: DEBUG oslo_concurrency.lockutils [req-17119476-9815-460b-a980-87f9351cf149 req-a77acb01-5a09-4058-bd54-5ae446b6d166 service nova] Releasing lock "refresh_cache-713dfaf5-d11f-4af2-af92-66a596b0ed4a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1405.427488] env[63371]: DEBUG nova.network.neutron [-] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1405.478544] env[63371]: DEBUG oslo_concurrency.lockutils [None req-49fc824b-7cc6-4736-803c-a2f595a53d57 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "1924d3d2-cc88-4fd2-b509-8463da796658" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 41.441s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1405.499071] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-69261fb4-686b-46f0-b228-c6fe2c7da055 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.508171] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940970cf-1bbe-4860-aa87-841719a7d2fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.546204] env[63371]: DEBUG nova.compute.manager [req-55631f43-f246-46b2-8642-c5ae1b1db542 req-fa07694c-f1c0-4d7a-b451-7cfa63d8c63e service nova] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Detach interface failed, port_id=fbd3a7d0-068b-4df5-be7f-d8bf5fe260de, reason: Instance 713dfaf5-d11f-4af2-af92-66a596b0ed4a could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1405.556579] env[63371]: DEBUG oslo_vmware.api [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773756, 'name': PowerOnVM_Task, 'duration_secs': 0.856797} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1405.557143] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1405.557472] env[63371]: INFO nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Took 9.21 seconds to spawn the instance on the hypervisor. 
[ 1405.557809] env[63371]: DEBUG nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1405.558734] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a5e050-c674-4aad-8506-cbd28a7692a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1405.758426] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773757, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1405.930622] env[63371]: INFO nova.compute.manager [-] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Took 1.38 seconds to deallocate network for instance. [ 1406.083434] env[63371]: INFO nova.compute.manager [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Took 51.50 seconds to build instance. [ 1406.259540] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773757, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531749} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.259862] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 852e14a7-2f9f-421c-9804-56c885885c7d/852e14a7-2f9f-421c-9804-56c885885c7d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1406.260113] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1406.260377] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-196a9780-8e0a-4729-a516-93597f35c6a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.270309] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1406.270309] env[63371]: value = "task-1773758" [ 1406.270309] env[63371]: _type = "Task" [ 1406.270309] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.279905] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773758, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.391715] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.391970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.392202] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.392389] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1406.392549] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.395233] env[63371]: INFO nova.compute.manager [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Terminating instance [ 1406.398667] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "refresh_cache-cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1406.398667] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 
tempest-ServerShowV247Test-1597920850-project-member] Acquired lock "refresh_cache-cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1406.398667] env[63371]: DEBUG nova.network.neutron [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1406.438015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1406.494847] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2154c5-c023-4e0c-8323-c097f7d6a722 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.504019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb656da-4c17-4b70-8e4f-cc33c20d6b73 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.535100] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05427ad4-143d-4e55-8121-31c6f28cfde1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.542977] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171eeb49-e6fe-4336-aa8b-0c3af0c45571 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.556344] env[63371]: DEBUG nova.compute.provider_tree [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1406.587986] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6bd2190d-6c09-4672-804d-77fe04a68a65 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 69.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1406.781401] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773758, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072881} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1406.781668] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1406.782485] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1ef9d9-b7ba-4c07-87a7-cab36dd93b6d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.805125] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 852e14a7-2f9f-421c-9804-56c885885c7d/852e14a7-2f9f-421c-9804-56c885885c7d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1406.805125] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08a4a841-c2f9-46ed-9ea6-f5eb9833b248 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1406.825094] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1406.825094] env[63371]: value = "task-1773759" [ 1406.825094] env[63371]: _type = "Task" [ 1406.825094] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1406.836603] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773759, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1406.918531] env[63371]: DEBUG nova.network.neutron [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1406.992614] env[63371]: DEBUG nova.network.neutron [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1407.062032] env[63371]: DEBUG nova.scheduler.client.report [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1407.096269] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1407.338875] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773759, 'name': ReconfigVM_Task, 'duration_secs': 0.283502} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1407.339148] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 852e14a7-2f9f-421c-9804-56c885885c7d/852e14a7-2f9f-421c-9804-56c885885c7d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1407.340700] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94ce64ba-087b-48ee-a6e0-46cc633b340e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.347508] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1407.347508] env[63371]: value = "task-1773760" [ 1407.347508] env[63371]: _type = "Task" [ 1407.347508] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.355919] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773760, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.495559] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Releasing lock "refresh_cache-cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1407.496487] env[63371]: DEBUG nova.compute.manager [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1407.496735] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1407.497640] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66642b77-d584-4586-bab7-bb0040f1d225 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.507682] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1407.508598] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ad176cb-8f99-474b-9780-de3f39038ca1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1407.519137] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1407.519137] env[63371]: value = "task-1773761" [ 1407.519137] env[63371]: _type = "Task" [ 1407.519137] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1407.525295] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773761, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.568019] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.621s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1407.568019] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1407.568852] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.915s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1407.570990] env[63371]: INFO nova.compute.claims [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1407.628368] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1407.861804] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773760, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1407.974328] env[63371]: DEBUG nova.compute.manager [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1407.975318] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471c2393-8715-4384-96b8-b57aa9aba811 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.029779] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773761, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.079223] env[63371]: DEBUG nova.compute.utils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1408.080345] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1408.080345] env[63371]: DEBUG nova.network.neutron [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1408.187320] env[63371]: DEBUG nova.policy [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b5bee716ea542f9a463941fa477a897', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9d19f4772ff46d3b3024851822cf833', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1408.365886] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773760, 'name': Rename_Task, 'duration_secs': 0.850859} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.366326] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1408.366586] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58ae7bfe-9f15-4d5d-a3e1-bcb0b6b9e48d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.376013] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1408.376013] env[63371]: value = "task-1773762" [ 1408.376013] env[63371]: _type = "Task" [ 1408.376013] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.386476] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773762, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.493043] env[63371]: INFO nova.compute.manager [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] instance snapshotting [ 1408.496390] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ee59d5-b428-45e9-8c56-8f4a4166be5c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.528104] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ef287ac-5d53-467a-ba3f-703875f1be51 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.536385] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773761, 'name': PowerOffVM_Task, 'duration_secs': 0.742507} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.539701] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1408.539701] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1408.543500] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-154da4a1-d532-4415-b6c9-8cc7a07a70a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.563718] env[63371]: DEBUG nova.network.neutron [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Successfully created port: 1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1408.572929] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1408.574095] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 
tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1408.574095] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleting the datastore file [datastore1] cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1408.574095] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f44a5fc-b635-4a8c-9a63-746ec9449abc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1408.585026] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for the task: (returnval){ [ 1408.585026] env[63371]: value = "task-1773764" [ 1408.585026] env[63371]: _type = "Task" [ 1408.585026] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1408.585026] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1408.601352] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773764, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1408.891058] env[63371]: DEBUG oslo_vmware.api [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773762, 'name': PowerOnVM_Task, 'duration_secs': 0.489147} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1408.893867] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1408.894190] env[63371]: INFO nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Took 9.83 seconds to spawn the instance on the hypervisor. 
[ 1408.894389] env[63371]: DEBUG nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1408.895779] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4533151-8005-4a0c-ac54-03ff79c20cd1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.046884] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1409.047234] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8ac2f637-1be8-4e96-8f63-d2608428b65a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.059569] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1409.059569] env[63371]: value = "task-1773765" [ 1409.059569] env[63371]: _type = "Task" [ 1409.059569] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1409.069512] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773765, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.105472] env[63371]: DEBUG oslo_vmware.api [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Task: {'id': task-1773764, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109473} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1409.105472] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1409.105472] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1409.105472] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1409.105696] env[63371]: INFO nova.compute.manager [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Took 1.61 seconds to destroy the instance on the hypervisor. [ 1409.105865] env[63371]: DEBUG oslo.service.loopingcall [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1409.106788] env[63371]: DEBUG nova.compute.manager [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1409.106788] env[63371]: DEBUG nova.network.neutron [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1409.135104] env[63371]: DEBUG nova.network.neutron [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1409.256336] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a2c58c-d20e-4970-8f53-b4cb1f228c18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.269266] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7502ca03-b4c5-4c53-ae15-b7cfeaba2719 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.303240] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a14b7bc7-ec00-4b25-bc31-88af381e6dff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.311816] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d442de87-883f-4f62-84d2-e2c17a740b4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.327042] env[63371]: DEBUG nova.compute.provider_tree [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1409.418825] env[63371]: INFO nova.compute.manager [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Took 47.23 seconds to build instance. [ 1409.571818] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773765, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1409.602620] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1409.633601] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1409.633939] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1409.634170] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1409.634506] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1409.634642] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1409.634841] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1409.635477] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1409.635477] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1409.635601] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1409.635824] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1409.636542] env[63371]: DEBUG nova.virt.hardware [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1409.637819] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fb0527-96e0-4fe0-b264-3f0ad77c9003 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.642307] env[63371]: DEBUG nova.network.neutron [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1409.650727] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a8288c-c781-4b68-b397-65ba61f52971 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1409.827913] env[63371]: DEBUG nova.scheduler.client.report [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1409.877071] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "f8119ade-7018-4ad8-82fe-baa0a6753c64" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1409.877310] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1409.922556] env[63371]: DEBUG oslo_concurrency.lockutils [None req-41f4b3d8-54d3-4d9a-b5c8-033e636c86c6 tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "852e14a7-2f9f-421c-9804-56c885885c7d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 69.697s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.075651] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773765, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.145067] env[63371]: INFO nova.compute.manager [-] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Took 1.04 seconds to deallocate network for instance. [ 1410.146249] env[63371]: DEBUG nova.compute.manager [req-6ebecd98-bc50-46ea-b1d8-24ef9d76b985 req-41b907c2-70d4-4d65-b6ce-12767823592e service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Received event network-vif-plugged-1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1410.146464] env[63371]: DEBUG oslo_concurrency.lockutils [req-6ebecd98-bc50-46ea-b1d8-24ef9d76b985 req-41b907c2-70d4-4d65-b6ce-12767823592e service nova] Acquiring lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.146669] env[63371]: DEBUG oslo_concurrency.lockutils [req-6ebecd98-bc50-46ea-b1d8-24ef9d76b985 req-41b907c2-70d4-4d65-b6ce-12767823592e service nova] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.146829] env[63371]: DEBUG oslo_concurrency.lockutils [req-6ebecd98-bc50-46ea-b1d8-24ef9d76b985 req-41b907c2-70d4-4d65-b6ce-12767823592e service nova] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.146991] env[63371]: DEBUG nova.compute.manager [req-6ebecd98-bc50-46ea-b1d8-24ef9d76b985 req-41b907c2-70d4-4d65-b6ce-12767823592e service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] No waiting events found dispatching network-vif-plugged-1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1410.147395] env[63371]: WARNING nova.compute.manager [req-6ebecd98-bc50-46ea-b1d8-24ef9d76b985 req-41b907c2-70d4-4d65-b6ce-12767823592e service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Received unexpected event network-vif-plugged-1b8eba67-08ba-47de-bad7-2e38e4a7ea31 for instance with vm_state building and task_state spawning. 
[ 1410.238422] env[63371]: DEBUG nova.network.neutron [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Successfully updated port: 1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1410.247426] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "852e14a7-2f9f-421c-9804-56c885885c7d" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.247700] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "852e14a7-2f9f-421c-9804-56c885885c7d" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.247931] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "852e14a7-2f9f-421c-9804-56c885885c7d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.248246] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "852e14a7-2f9f-421c-9804-56c885885c7d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.248371] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "852e14a7-2f9f-421c-9804-56c885885c7d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.251966] env[63371]: INFO nova.compute.manager [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Terminating instance [ 1410.254114] env[63371]: DEBUG nova.compute.manager [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1410.254377] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1410.256115] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d4fe6a2-1a6c-4c2b-a9c0-163387e80be1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.266123] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1410.266384] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db1dff37-30f8-4706-9e1a-977572de3369 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.274446] env[63371]: DEBUG oslo_vmware.api [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1410.274446] env[63371]: value = "task-1773766" [ 1410.274446] env[63371]: _type = "Task" [ 1410.274446] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.283342] env[63371]: DEBUG oslo_vmware.api [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773766, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.338019] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.766s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1410.338019] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1410.339287] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.960s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1410.341315] env[63371]: INFO nova.compute.claims [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1410.423623] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1410.575124] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773765, 'name': CreateSnapshot_Task, 'duration_secs': 1.349315} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.575458] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1410.576328] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c803ff7-fe18-4daa-aeb7-7613bb7936a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.655061] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1410.744460] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "refresh_cache-4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1410.744460] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired lock "refresh_cache-4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1410.744460] env[63371]: DEBUG nova.network.neutron [None req-70f63dc5-74a3-4158-9c81-0145046535b9 
tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1410.784184] env[63371]: DEBUG oslo_vmware.api [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773766, 'name': PowerOffVM_Task, 'duration_secs': 0.201842} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1410.784657] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1410.784657] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1410.784907] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-957d5213-a9d0-4027-ad19-87ed8b7e5171 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.848029] env[63371]: DEBUG nova.compute.utils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1410.850908] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1410.851112] env[63371]: DEBUG nova.network.neutron [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1410.854859] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1410.855073] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1410.855251] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleting the datastore file [datastore1] 852e14a7-2f9f-421c-9804-56c885885c7d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1410.855827] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26c824a1-44ed-4f66-ae1e-d937a1f86bc4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1410.863276] env[63371]: DEBUG oslo_vmware.api [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for the task: (returnval){ [ 1410.863276] env[63371]: value = "task-1773768" [ 1410.863276] env[63371]: _type = "Task" [ 1410.863276] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1410.873682] env[63371]: DEBUG oslo_vmware.api [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773768, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1410.944868] env[63371]: DEBUG nova.policy [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aacd81490704110b6cc6aba338883a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a5897667b6b47deb7ff5b64f9499f36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1410.949382] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1411.096481] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1411.096906] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fb19541b-fc27-4b4f-bb91-7b7f3d021662 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.106477] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1411.106477] env[63371]: value = "task-1773769" [ 1411.106477] env[63371]: _type = "Task" [ 1411.106477] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1411.114910] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773769, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.289294] env[63371]: DEBUG nova.network.neutron [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1411.316311] env[63371]: DEBUG nova.network.neutron [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Successfully created port: d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1411.358126] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1411.374821] env[63371]: DEBUG oslo_vmware.api [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Task: {'id': task-1773768, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149927} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1411.375063] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1411.375262] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1411.375455] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1411.375589] env[63371]: INFO nova.compute.manager [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1411.375857] env[63371]: DEBUG oslo.service.loopingcall [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1411.375993] env[63371]: DEBUG nova.compute.manager [-] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1411.376092] env[63371]: DEBUG nova.network.neutron [-] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1411.626558] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773769, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1411.643905] env[63371]: DEBUG nova.network.neutron [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Updating instance_info_cache with network_info: [{"id": "1b8eba67-08ba-47de-bad7-2e38e4a7ea31", "address": "fa:16:3e:92:7a:23", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b8eba67-08", "ovs_interfaceid": "1b8eba67-08ba-47de-bad7-2e38e4a7ea31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1411.982039] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bba1b2b-bd2b-4b88-a02d-4ed011bcc07e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.988538] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fbf2eb2-9b60-4ddf-9c65-1c9c2e5f1f7a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.021299] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4070a6-5e21-4551-8b57-8ab96871e08c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.028935] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50268838-edd9-4889-9d0e-3292f90f2f63 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1412.042425] env[63371]: DEBUG nova.compute.provider_tree [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1412.117292] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773769, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.152895] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Releasing lock "refresh_cache-4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1412.152895] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Instance network_info: |[{"id": "1b8eba67-08ba-47de-bad7-2e38e4a7ea31", "address": "fa:16:3e:92:7a:23", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b8eba67-08", "ovs_interfaceid": "1b8eba67-08ba-47de-bad7-2e38e4a7ea31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1412.153220] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:7a:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b8eba67-08ba-47de-bad7-2e38e4a7ea31', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1412.158689] env[63371]: DEBUG oslo.service.loopingcall [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for 
function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1412.158907] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1412.159200] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-de02b9b1-8f9f-4d01-ae65-64a47abed162 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.176559] env[63371]: DEBUG nova.compute.manager [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Received event network-changed-1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1412.176753] env[63371]: DEBUG nova.compute.manager [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Refreshing instance network info cache due to event network-changed-1b8eba67-08ba-47de-bad7-2e38e4a7ea31. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1412.176967] env[63371]: DEBUG oslo_concurrency.lockutils [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] Acquiring lock "refresh_cache-4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.177127] env[63371]: DEBUG oslo_concurrency.lockutils [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] Acquired lock "refresh_cache-4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.177285] env[63371]: DEBUG nova.network.neutron [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Refreshing network info cache for port 1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1412.179942] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1412.179942] env[63371]: value = "task-1773770" [ 1412.179942] env[63371]: _type = "Task" [ 1412.179942] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.188685] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773770, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.372569] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1412.399391] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1412.399646] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1412.399823] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1412.400095] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1412.400298] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1412.400450] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1412.400659] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1412.400858] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1412.401066] env[63371]: DEBUG 
nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1412.401236] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1412.401410] env[63371]: DEBUG nova.virt.hardware [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1412.402291] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e7ebb5-64ed-4077-86c4-80b7e99ddd16 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.410431] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1513633c-8dad-49fb-9f8a-9a6599912949 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.414209] env[63371]: DEBUG nova.network.neutron [-] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1412.548843] env[63371]: DEBUG nova.scheduler.client.report [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1412.618220] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773769, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.691941] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773770, 'name': CreateVM_Task, 'duration_secs': 0.508306} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1412.692274] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1412.693031] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1412.693031] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1412.693353] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1412.693627] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-112de93d-5fca-433e-80a1-8a2fd49096da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.698423] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1412.698423] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c73ba7-ccc6-504e-b44f-1de65f5a5f4c" [ 1412.698423] env[63371]: _type = "Task" [ 1412.698423] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1412.706993] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c73ba7-ccc6-504e-b44f-1de65f5a5f4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1412.917663] env[63371]: INFO nova.compute.manager [-] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Took 1.54 seconds to deallocate network for instance. [ 1412.999296] env[63371]: DEBUG nova.network.neutron [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Updated VIF entry in instance network info cache for port 1b8eba67-08ba-47de-bad7-2e38e4a7ea31. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1412.999641] env[63371]: DEBUG nova.network.neutron [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Updating instance_info_cache with network_info: [{"id": "1b8eba67-08ba-47de-bad7-2e38e4a7ea31", "address": "fa:16:3e:92:7a:23", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.250", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b8eba67-08", "ovs_interfaceid": "1b8eba67-08ba-47de-bad7-2e38e4a7ea31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1413.053790] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.714s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1413.054341] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1413.057406] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 29.923s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1413.118147] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773769, 'name': CloneVM_Task, 'duration_secs': 1.635374} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.118426] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Created linked-clone VM from snapshot [ 1413.119192] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4f790a-e312-4e23-9bee-130a5b4d3463 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.126483] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Uploading image 67c79a69-90fa-469e-b65b-470387ba8d71 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1413.154603] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1413.154603] env[63371]: value = "vm-368279" [ 1413.154603] env[63371]: _type = "VirtualMachine" [ 1413.154603] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1413.154873] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-19023291-0f48-4224-a6b7-76af45716180 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.162299] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lease: (returnval){ [ 1413.162299] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52521e4b-4dfa-850d-16c2-6a821952414f" [ 1413.162299] env[63371]: _type = "HttpNfcLease" [ 1413.162299] env[63371]: } obtained for exporting VM: (result){ [ 1413.162299] env[63371]: value = "vm-368279" [ 1413.162299] env[63371]: _type = "VirtualMachine" [ 1413.162299] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1413.162534] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the lease: (returnval){ [ 1413.162534] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52521e4b-4dfa-850d-16c2-6a821952414f" [ 1413.162534] env[63371]: _type = "HttpNfcLease" [ 1413.162534] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1413.168877] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1413.168877] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52521e4b-4dfa-850d-16c2-6a821952414f" [ 1413.168877] env[63371]: _type = "HttpNfcLease" [ 1413.168877] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1413.208851] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c73ba7-ccc6-504e-b44f-1de65f5a5f4c, 'name': SearchDatastore_Task, 'duration_secs': 0.009679} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.209166] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.209397] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1413.209628] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.209782] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.210015] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1413.210293] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-91616435-cdcf-4bd6-8156-f39f0cb9ef71 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.230145] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1413.230340] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1413.231108] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b82e7c3-ca71-4b6f-857f-347421b14d39 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.236543] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1413.236543] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bde76a-c398-79b8-ab68-082b83670fb1" [ 1413.236543] env[63371]: _type = "Task" [ 1413.236543] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.244550] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bde76a-c398-79b8-ab68-082b83670fb1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.424457] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1413.492417] env[63371]: DEBUG nova.network.neutron [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Successfully updated port: d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1413.502645] env[63371]: DEBUG oslo_concurrency.lockutils [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] Releasing lock "refresh_cache-4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1413.502896] env[63371]: DEBUG nova.compute.manager [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Received event network-vif-deleted-386f3dc5-c792-4979-a938-7ec61bb88563 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1413.503147] env[63371]: INFO nova.compute.manager [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Neutron deleted interface 386f3dc5-c792-4979-a938-7ec61bb88563; detaching it from the instance and deleting it from the info cache [ 1413.503332] env[63371]: DEBUG nova.network.neutron [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1413.562554] env[63371]: DEBUG nova.compute.utils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1413.569185] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1413.569502] env[63371]: DEBUG nova.network.neutron [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1413.632599] env[63371]: DEBUG nova.policy [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4302f381e0948438b9ee23a33a0f982', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35882164a8734563a006675f2ec6ba71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1413.675157] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1413.675157] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52521e4b-4dfa-850d-16c2-6a821952414f" [ 1413.675157] env[63371]: _type = "HttpNfcLease" [ 1413.675157] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1413.677286] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1413.677286] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52521e4b-4dfa-850d-16c2-6a821952414f" [ 1413.677286] env[63371]: _type = "HttpNfcLease" [ 1413.677286] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1413.678294] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6b154e-07e1-4789-a169-19b0bf6537ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.689053] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52202cc9-4d47-68c7-6e3e-d1d1ad1701e0/disk-0.vmdk from lease info. 
{{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1413.689252] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52202cc9-4d47-68c7-6e3e-d1d1ad1701e0/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1413.762841] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bde76a-c398-79b8-ab68-082b83670fb1, 'name': SearchDatastore_Task, 'duration_secs': 0.00877} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1413.762841] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c821bec-3d94-428d-b53e-f8cfc4e7252d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.767811] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1413.767811] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b769d1-345e-6b4a-1396-5d92915b42bd" [ 1413.767811] env[63371]: _type = "Task" [ 1413.767811] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1413.779113] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b769d1-345e-6b4a-1396-5d92915b42bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1413.787755] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6705bacc-4f31-4f90-99ae-12c059800ed6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.962584] env[63371]: DEBUG nova.network.neutron [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Successfully created port: b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1413.994124] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1413.994267] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1413.994409] env[63371]: DEBUG nova.network.neutron [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1414.014187] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1cfd5152-536b-4235-9623-35df7e24b4d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.027052] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3c65fc-e8d5-4912-8a44-bf9a88f0c751 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.071630] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1414.074503] env[63371]: DEBUG nova.compute.manager [req-91e317ce-f6d0-4b4e-834b-ae74be1c305e req-195a0857-d3c1-45db-b19d-d156479d31b9 service nova] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Detach interface failed, port_id=386f3dc5-c792-4979-a938-7ec61bb88563, reason: Instance 852e14a7-2f9f-421c-9804-56c885885c7d could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1414.195280] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7d2488-6c1b-4b4b-b3fe-5dd0c6b1557e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.203826] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da547c8a-0e0d-40e4-94a7-daccec1edfe0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.246483] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93db7e32-165c-4738-8ab3-81b06fc6987e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.255559] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4fb71b0-1cd7-4c63-8e56-abafca856a13 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.271097] env[63371]: DEBUG nova.compute.provider_tree [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1414.285119] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b769d1-345e-6b4a-1396-5d92915b42bd, 'name': SearchDatastore_Task, 'duration_secs': 0.009726} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1414.285387] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1414.285741] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6/4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1414.288166] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-771dc7e6-4e9a-401b-a406-d9855bd23536 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1414.294673] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1414.294673] env[63371]: value = "task-1773772" [ 1414.294673] env[63371]: _type = "Task" [ 1414.294673] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1414.304515] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773772, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1414.312256] env[63371]: DEBUG nova.compute.manager [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received event network-vif-plugged-d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1414.313047] env[63371]: DEBUG oslo_concurrency.lockutils [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1414.313126] env[63371]: DEBUG oslo_concurrency.lockutils [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1414.313300] env[63371]: DEBUG oslo_concurrency.lockutils [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1414.313530] env[63371]: DEBUG nova.compute.manager [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] No waiting events found dispatching network-vif-plugged-d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1414.313693] env[63371]: WARNING nova.compute.manager [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received unexpected event network-vif-plugged-d92b8632-8794-486c-a8eb-5c8844009035 for instance with vm_state building and task_state spawning. [ 1414.313804] env[63371]: DEBUG nova.compute.manager [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received event network-changed-d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1414.314119] env[63371]: DEBUG nova.compute.manager [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Refreshing instance network info cache due to event network-changed-d92b8632-8794-486c-a8eb-5c8844009035. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1414.314292] env[63371]: DEBUG oslo_concurrency.lockutils [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] Acquiring lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1414.528194] env[63371]: DEBUG nova.network.neutron [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1414.737924] env[63371]: DEBUG nova.network.neutron [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1414.780041] env[63371]: DEBUG nova.scheduler.client.report [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1414.809369] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773772, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.085078] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1415.111361] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1415.111710] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1415.111963] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1415.112189] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1415.112399] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1415.112558] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1415.112841] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1415.113080] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1415.113347] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1415.113513] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1415.113682] env[63371]: DEBUG nova.virt.hardware [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1415.114677] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae070f0-31df-40d5-8d0a-4d07d4a4e763 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.122907] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-269fe18f-2c69-4ac4-a2c3-b7baf0c6883a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.244061] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1415.244497] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Instance network_info: |[{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1415.244861] env[63371]: DEBUG oslo_concurrency.lockutils [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] Acquired lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1415.244982] env[63371]: DEBUG nova.network.neutron [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Refreshing network info cache for port d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1415.246419] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:79:85', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ba3bd22-c936-470e-89bd-b3a5587e87a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd92b8632-8794-486c-a8eb-5c8844009035', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1415.254138] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Creating folder: Project (5a5897667b6b47deb7ff5b64f9499f36). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.255443] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e67433d7-f17b-4f88-baea-6b5b8b1d19ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.266814] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Created folder: Project (5a5897667b6b47deb7ff5b64f9499f36) in parent group-v368199. [ 1415.266995] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Creating folder: Instances. Parent ref: group-v368281. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1415.267239] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-06e32a3f-aa9f-4428-8a4f-053e74936309 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.277055] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Created folder: Instances in parent group-v368281. 
[ 1415.277298] env[63371]: DEBUG oslo.service.loopingcall [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1415.277521] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1415.277766] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2d8beec-2da0-4e71-bb78-0486ad9886b9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.302784] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1415.302784] env[63371]: value = "task-1773775" [ 1415.302784] env[63371]: _type = "Task" [ 1415.302784] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.310133] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773772, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766508} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.310889] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6/4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1415.311149] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1415.312778] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-44a1c836-a74c-4fd2-9faa-cf7cbb591ae5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.318238] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773775, 'name': CreateVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.323099] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1415.323099] env[63371]: value = "task-1773776" [ 1415.323099] env[63371]: _type = "Task" [ 1415.323099] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.331989] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773776, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.656526] env[63371]: DEBUG nova.network.neutron [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Successfully updated port: b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1415.798512] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.741s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1415.802226] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.473s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1415.807701] env[63371]: INFO nova.compute.claims [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1415.831549] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773775, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1415.837596] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773776, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077667} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1415.837596] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1415.838148] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026c0cfa-a386-4d4d-be8b-6923d7f80309 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.867409] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6/4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1415.869237] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d79aa80e-1466-4d1d-a9b8-07facc18784d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1415.899123] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1415.899123] env[63371]: value = "task-1773777" [ 1415.899123] env[63371]: _type = "Task" [ 1415.899123] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1415.907275] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773777, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.052102] env[63371]: DEBUG nova.network.neutron [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updated VIF entry in instance network info cache for port d92b8632-8794-486c-a8eb-5c8844009035. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1416.052464] env[63371]: DEBUG nova.network.neutron [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1416.160054] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.160054] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1416.160241] env[63371]: DEBUG nova.network.neutron [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1416.334351] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773775, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.382270] env[63371]: INFO nova.scheduler.client.report [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Deleted allocation for migration e496466e-2a3b-442c-9adb-941ce7e06a5e [ 1416.408516] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773777, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.416762] env[63371]: DEBUG nova.compute.manager [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Received event network-vif-plugged-b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1416.416976] env[63371]: DEBUG oslo_concurrency.lockutils [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] Acquiring lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1416.417524] env[63371]: DEBUG oslo_concurrency.lockutils [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1416.417524] env[63371]: DEBUG oslo_concurrency.lockutils [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.417524] env[63371]: DEBUG nova.compute.manager [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] No waiting events found dispatching network-vif-plugged-b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1416.417716] env[63371]: WARNING nova.compute.manager [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Received unexpected event network-vif-plugged-b03ddfde-3b36-43a8-8c6a-00cd704bce22 for instance with vm_state building and task_state spawning. [ 1416.417936] env[63371]: DEBUG nova.compute.manager [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Received event network-changed-b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1416.418028] env[63371]: DEBUG nova.compute.manager [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Refreshing instance network info cache due to event network-changed-b03ddfde-3b36-43a8-8c6a-00cd704bce22. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1416.418163] env[63371]: DEBUG oslo_concurrency.lockutils [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] Acquiring lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1416.555718] env[63371]: DEBUG oslo_concurrency.lockutils [req-a3dfe403-f002-493b-ad3a-30f4531cc220 req-25af67c9-494d-4c7c-8d9d-b71ed42309e6 service nova] Releasing lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1416.706319] env[63371]: DEBUG nova.network.neutron [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1416.837388] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773775, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.890133] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e9aa82e-c1c0-4907-8fe3-ac48ea4a9dd3 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 36.765s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1416.909185] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773777, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1416.934795] env[63371]: DEBUG nova.network.neutron [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updating instance_info_cache with network_info: [{"id": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "address": "fa:16:3e:b7:8f:81", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb03ddfde-3b", "ovs_interfaceid": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1417.333086] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773775, 'name': CreateVM_Task, 'duration_secs': 1.6653} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.333276] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1417.333964] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.337782] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.337782] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1417.337782] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d476bbe8-8cd6-430a-b8f9-d49a31e8dd10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.340845] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1417.340845] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522d3c90-dec2-d051-8f13-94bd82c205ba" [ 1417.340845] env[63371]: _type = "Task" [ 1417.340845] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.350938] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522d3c90-dec2-d051-8f13-94bd82c205ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.410967] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773777, 'name': ReconfigVM_Task, 'duration_secs': 1.425376} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.410967] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6/4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1417.411839] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4d86df70-c146-473f-8a1a-a1909f977e4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.419113] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1417.419113] env[63371]: value = "task-1773778" [ 1417.419113] env[63371]: _type = "Task" [ 1417.419113] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.426948] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773778, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.439902] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.439902] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Instance network_info: |[{"id": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "address": "fa:16:3e:b7:8f:81", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb03ddfde-3b", "ovs_interfaceid": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1417.441293] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fef0855c-eada-40bb-8a0a-4d85f8d9a12a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.443583] env[63371]: DEBUG oslo_concurrency.lockutils [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] Acquired lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.443786] env[63371]: DEBUG nova.network.neutron [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Refreshing network info cache for port b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1417.445065] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:8f:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b03ddfde-3b36-43a8-8c6a-00cd704bce22', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1417.453420] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Creating folder: Project (35882164a8734563a006675f2ec6ba71). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1417.454698] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef551d84-f33c-4bd2-a0f4-8640460f1b55 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.461850] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84017d0-42d9-4abd-9a04-a797e11ff587 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.466573] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Created folder: Project (35882164a8734563a006675f2ec6ba71) in parent group-v368199. [ 1417.466766] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Creating folder: Instances. Parent ref: group-v368284. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1417.467012] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9291d36-c3ce-4bd3-aafb-a41af8a1cb39 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.493851] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4a9aa6-c4d9-494c-b63f-b8f1f1273b41 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.502015] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde8e194-3003-4eab-9648-8a2849a4dd6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.507242] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Created folder: Instances in parent group-v368284. [ 1417.507504] env[63371]: DEBUG oslo.service.loopingcall [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1417.508168] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1417.509030] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-104a0c3b-84ec-46ce-80a1-ac5da9d3ade1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.531320] env[63371]: DEBUG nova.compute.provider_tree [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1417.539369] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1417.539369] env[63371]: value = "task-1773781" [ 1417.539369] env[63371]: _type = "Task" [ 1417.539369] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.549312] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773781, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.862256] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522d3c90-dec2-d051-8f13-94bd82c205ba, 'name': SearchDatastore_Task, 'duration_secs': 0.012532} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.862635] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.862887] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1417.863111] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1417.863258] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1417.863433] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1417.863698] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0784ba7a-7bf8-4f16-9fee-1f7ef3945c59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.879743] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1417.880131] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1417.884864] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29a06dd7-571a-48c1-afa1-998339e6b836 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.895656] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1417.895656] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca2347-90a5-4ef6-e2c9-a4168249c7aa" [ 1417.895656] env[63371]: _type = "Task" [ 1417.895656] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.917800] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca2347-90a5-4ef6-e2c9-a4168249c7aa, 'name': SearchDatastore_Task, 'duration_secs': 0.012017} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.918450] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d36d923a-e4d7-4c51-8720-14ac14ec14cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.941430] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1417.941430] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5244365c-7a31-d35c-8ba6-da9cc91110ba" [ 1417.941430] env[63371]: _type = "Task" [ 1417.941430] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.953822] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773778, 'name': Rename_Task, 'duration_secs': 0.182312} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.956548] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1417.957271] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9def46f0-902e-4a86-8bfc-f2755385750e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.963781] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5244365c-7a31-d35c-8ba6-da9cc91110ba, 'name': SearchDatastore_Task, 'duration_secs': 0.012308} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1417.964435] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1417.964695] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05/7e463dd7-84a6-4e6d-ae8f-0860e3a20f05.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1417.964956] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-097c7ad8-b95a-45ee-b21c-9b9365cb829e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1417.969377] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1417.969377] env[63371]: value = "task-1773782" [ 1417.969377] env[63371]: _type = "Task" [ 1417.969377] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.973922] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1417.973922] env[63371]: value = "task-1773783" [ 1417.973922] env[63371]: _type = "Task" [ 1417.973922] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1417.980182] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773782, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1417.985528] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773783, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.034219] env[63371]: DEBUG nova.scheduler.client.report [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1418.053545] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773781, 'name': CreateVM_Task, 'duration_secs': 0.479732} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.053748] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1418.054485] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.054646] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.054959] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1418.055253] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4bc5981c-d16f-4aa1-b5ef-62ebd7ee6920 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.059973] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1418.059973] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9a610-3cc9-e16d-570a-1401d1514de1" [ 1418.059973] env[63371]: _type = "Task" [ 1418.059973] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.068645] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9a610-3cc9-e16d-570a-1401d1514de1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.235116] env[63371]: DEBUG nova.network.neutron [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updated VIF entry in instance network info cache for port b03ddfde-3b36-43a8-8c6a-00cd704bce22. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1418.235589] env[63371]: DEBUG nova.network.neutron [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updating instance_info_cache with network_info: [{"id": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "address": "fa:16:3e:b7:8f:81", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb03ddfde-3b", "ovs_interfaceid": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1418.443034] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1418.443437] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 
tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.482155] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773782, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.487226] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773783, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.539340] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.737s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1418.540658] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.924s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1418.542736] env[63371]: INFO nova.compute.claims [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1418.571863] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9a610-3cc9-e16d-570a-1401d1514de1, 'name': SearchDatastore_Task, 'duration_secs': 0.016419} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.572270] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.572482] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1418.572730] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1418.572870] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1418.573073] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1418.573712] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66ea5130-c52d-45aa-b0cc-fcb571f9e870 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.587101] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1418.587336] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1418.588139] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75cec582-006d-436d-bfbf-648432eebe53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.594243] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1418.594243] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ba1797-7284-e595-2804-531c3625a187" [ 1418.594243] env[63371]: _type = "Task" [ 1418.594243] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1418.605341] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ba1797-7284-e595-2804-531c3625a187, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1418.738763] env[63371]: DEBUG oslo_concurrency.lockutils [req-0e8238ff-1c67-4b23-8c8c-a641338fe854 req-1d90ed0a-8198-4010-a599-3b2d7f36a81d service nova] Releasing lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1418.983463] env[63371]: DEBUG oslo_vmware.api [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773782, 'name': PowerOnVM_Task, 'duration_secs': 0.860292} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.985543] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1418.985791] env[63371]: INFO nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Took 9.38 seconds to spawn the instance on the hypervisor. 
[ 1418.986015] env[63371]: DEBUG nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1418.989581] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae445254-d264-4017-b6e8-e5a4aca8907a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1418.992057] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773783, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.661202} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1418.992305] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05/7e463dd7-84a6-4e6d-ae8f-0860e3a20f05.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1418.992509] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1418.993104] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e9beec18-74bb-47cb-8b89-fb2c19ce077b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.003828] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1419.003828] env[63371]: value = "task-1773784" [ 1419.003828] env[63371]: _type = "Task" [ 1419.003828] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.013035] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773784, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.049281] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "d6de04c1-3475-4ac1-8e17-b5905ca9b7a9" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1419.049562] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "d6de04c1-3475-4ac1-8e17-b5905ca9b7a9" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1419.104992] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ba1797-7284-e595-2804-531c3625a187, 'name': SearchDatastore_Task, 'duration_secs': 0.02289} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.105677] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa33deac-4aef-42d8-b609-1da94bc68280 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.110730] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1419.110730] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bdaad1-9ffb-d29c-33a3-61e38b19e9d8" [ 1419.110730] env[63371]: _type = "Task" [ 1419.110730] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.118117] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bdaad1-9ffb-d29c-33a3-61e38b19e9d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.511880] env[63371]: INFO nova.compute.manager [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Took 50.93 seconds to build instance. [ 1419.518916] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773784, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065839} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.519187] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1419.519989] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e231729f-e978-4e29-ad32-02bc592977be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.543240] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05/7e463dd7-84a6-4e6d-ae8f-0860e3a20f05.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1419.543731] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a39c89f0-b284-46e3-b17c-e1b61f16cdd3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.560879] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "d6de04c1-3475-4ac1-8e17-b5905ca9b7a9" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.511s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1419.561335] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1419.570538] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1419.570538] env[63371]: value = "task-1773785" [ 1419.570538] env[63371]: _type = "Task" [ 1419.570538] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.624730] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bdaad1-9ffb-d29c-33a3-61e38b19e9d8, 'name': SearchDatastore_Task, 'duration_secs': 0.040512} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1419.624995] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1419.625265] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 44cc8606-24f5-4f6b-b96f-3559c9c3f06e/44cc8606-24f5-4f6b-b96f-3559c9c3f06e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1419.625527] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-480825fc-23d5-48c0-b14f-a63a712c56f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.633646] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1419.633646] env[63371]: value = "task-1773786" [ 1419.633646] env[63371]: _type = "Task" [ 1419.633646] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.644843] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773786, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1419.890884] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafcceba-af1a-4b41-b401-ad41bf096811 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.898024] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Suspending the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1419.898024] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-3cf1c19f-12e4-4f4f-af44-b444cc6ecbaa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1419.906747] env[63371]: DEBUG oslo_vmware.api [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] Waiting for the task: (returnval){ [ 1419.906747] env[63371]: value = "task-1773787" [ 1419.906747] env[63371]: _type = "Task" [ 1419.906747] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1419.917718] env[63371]: DEBUG oslo_vmware.api [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] Task: {'id': task-1773787, 'name': SuspendVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.016203] env[63371]: DEBUG oslo_concurrency.lockutils [None req-70f63dc5-74a3-4158-9c81-0145046535b9 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.073s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1420.068316] env[63371]: DEBUG nova.compute.utils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1420.070379] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1420.070557] env[63371]: DEBUG nova.network.neutron [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1420.083628] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773785, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.086675] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6bedae-2cb6-44e2-b89b-cf3d13213883 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.095114] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94d4ed7-ddf5-4158-80df-0c7c8b473f8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.126631] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9109301c-1bd6-48e8-8815-7386dd3e973e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.130165] env[63371]: DEBUG nova.policy [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1819eca41829451e9a866d7e34cbe801', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1bda622ab1474b76a46a5ba68977188b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1420.140758] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b7f4f5-2662-465c-857d-7d3fa4bcc90a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1420.151668] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773786, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.159732] env[63371]: DEBUG nova.compute.provider_tree [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1420.424882] env[63371]: DEBUG oslo_vmware.api [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] Task: {'id': task-1773787, 'name': SuspendVM_Task} progress is 62%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.488458] env[63371]: DEBUG nova.network.neutron [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Successfully created port: 292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1420.520024] env[63371]: DEBUG nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1420.575170] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1420.590275] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773785, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.644378] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773786, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1420.663158] env[63371]: DEBUG nova.scheduler.client.report [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1420.918450] env[63371]: DEBUG oslo_vmware.api [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] Task: {'id': task-1773787, 'name': SuspendVM_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.043453] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1421.091142] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773785, 'name': ReconfigVM_Task, 'duration_secs': 1.342121} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.091871] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05/7e463dd7-84a6-4e6d-ae8f-0860e3a20f05.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1421.092288] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4e964421-06b9-4236-9139-f85d0184d092 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.098905] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1421.098905] env[63371]: value = "task-1773788" [ 1421.098905] env[63371]: _type = "Task" [ 1421.098905] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.107905] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773788, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.144036] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773786, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.020922} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.144298] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 44cc8606-24f5-4f6b-b96f-3559c9c3f06e/44cc8606-24f5-4f6b-b96f-3559c9c3f06e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1421.144507] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1421.144822] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d3fc1bf-12c5-42d1-9bba-5fdd13d83b87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.153364] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1421.153364] env[63371]: value = "task-1773789" [ 1421.153364] env[63371]: _type = "Task" [ 1421.153364] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.163015] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773789, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.171047] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.630s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1421.171585] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1421.174708] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 26.751s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1421.419920] env[63371]: DEBUG oslo_vmware.api [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] Task: {'id': task-1773787, 'name': SuspendVM_Task, 'duration_secs': 1.028245} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.420233] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Suspended the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1421.420416] env[63371]: DEBUG nova.compute.manager [None req-11dcbfe0-ec5a-4aa3-8ead-c386b95417da tempest-ServersAdminNegativeTestJSON-236221674 tempest-ServersAdminNegativeTestJSON-236221674-project-admin] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1421.421238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa0211d-53ad-4a28-b3bc-56463e819614 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.594063] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1421.609522] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773788, 'name': Rename_Task, 'duration_secs': 0.19632} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.609825] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1421.610118] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ba28dfd-1a52-477c-969b-b683b29feeef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.618331] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1421.618331] env[63371]: value = "task-1773790" [ 1421.618331] env[63371]: _type = "Task" [ 1421.618331] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.620514] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1421.620755] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1421.620913] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1421.621135] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1421.621284] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1421.621443] env[63371]: DEBUG nova.virt.hardware [None 
req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1421.621652] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1421.621826] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1421.622020] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1421.622200] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1421.622381] env[63371]: DEBUG nova.virt.hardware [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1421.623261] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b05fe69-d8f6-4a72-a7e8-e314c313e829 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.635705] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773790, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.636852] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498f4b80-1272-4961-b9d6-a4887847f1e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.661689] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773789, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062443} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1421.661962] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1421.662770] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24ebfbe-b6d0-4807-ac80-7e8e60968f9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.693560] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 44cc8606-24f5-4f6b-b96f-3559c9c3f06e/44cc8606-24f5-4f6b-b96f-3559c9c3f06e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1421.694967] env[63371]: DEBUG nova.compute.utils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1421.696842] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66ce78c8-4cb1-4f28-b066-1c61afae68a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.713545] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1421.716352] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1421.716763] env[63371]: DEBUG nova.network.neutron [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1421.725600] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1421.725600] env[63371]: value = "task-1773791" [ 1421.725600] env[63371]: _type = "Task" [ 1421.725600] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1421.732740] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773791, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1421.776315] env[63371]: DEBUG nova.policy [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3dec49b67cd49159192b5c2756fc2e6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0f2fde472b14ab9a4d20947ca714191', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1422.133298] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773790, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.178938] env[63371]: DEBUG nova.compute.manager [req-a1ee5992-ecc0-44ad-b401-6347f4d2613d req-294d3906-bf97-47de-961c-67acc4b65d7b service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Received event network-vif-plugged-292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1422.179167] env[63371]: DEBUG oslo_concurrency.lockutils [req-a1ee5992-ecc0-44ad-b401-6347f4d2613d req-294d3906-bf97-47de-961c-67acc4b65d7b service nova] Acquiring lock "9249f27a-1985-4be1-947c-e433c7aa26f1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1422.179378] env[63371]: DEBUG oslo_concurrency.lockutils [req-a1ee5992-ecc0-44ad-b401-6347f4d2613d req-294d3906-bf97-47de-961c-67acc4b65d7b service nova] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1422.179541] env[63371]: DEBUG oslo_concurrency.lockutils [req-a1ee5992-ecc0-44ad-b401-6347f4d2613d req-294d3906-bf97-47de-961c-67acc4b65d7b service nova] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1422.180119] env[63371]: DEBUG nova.compute.manager [req-a1ee5992-ecc0-44ad-b401-6347f4d2613d req-294d3906-bf97-47de-961c-67acc4b65d7b service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] No waiting events found dispatching network-vif-plugged-292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1422.180119] env[63371]: 
WARNING nova.compute.manager [req-a1ee5992-ecc0-44ad-b401-6347f4d2613d req-294d3906-bf97-47de-961c-67acc4b65d7b service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Received unexpected event network-vif-plugged-292614c6-49c4-4096-afda-debce88edee1 for instance with vm_state building and task_state spawning. [ 1422.233327] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773791, 'name': ReconfigVM_Task, 'duration_secs': 0.290259} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.233611] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 44cc8606-24f5-4f6b-b96f-3559c9c3f06e/44cc8606-24f5-4f6b-b96f-3559c9c3f06e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1422.234295] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-094e2bea-905f-482d-9f89-5bf054c7ecbb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.244454] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1422.244454] env[63371]: value = "task-1773792" [ 1422.244454] env[63371]: _type = "Task" [ 1422.244454] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.244454] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance ca53accc-a15f-4503-87e5-7cbf3e2c0b43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.244454] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 47c1c242-d190-4523-8033-307c5a9b7535 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.244454] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 201a2d1e-9e2c-4c07-92be-200408874ad4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.244814] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 33cf00ea-3195-41cf-9b7a-a8e64496a122 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.244814] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance cbcdfe1a-86a4-4a12-99b5-44d291d41769 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.244814] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance cfbd0c7c-243e-497a-acb1-ab9323c23574 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.244814] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1422.245134] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e4608e3c-7083-42fa-b88c-8ee007ef7f60 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245134] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e0369f27-68ea-49c4-8524-3dbbb3cde96e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245134] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e912c210-3ae1-47ce-b9cd-afebf6195606 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245134] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 713dfaf5-d11f-4af2-af92-66a596b0ed4a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1422.245250] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance af1281ba-c3be-43b4-a039-86d94bd9efe4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1422.245250] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1422.245250] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 76c861a7-30f2-40f4-b723-7912975f36f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245250] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245250] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e00c2e45-b8bc-440b-8b58-a21f127192c7 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245390] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance dc6ef0a7-1744-4b90-b385-913cb796f7d0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245390] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 852e14a7-2f9f-421c-9804-56c885885c7d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1422.245390] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245390] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245500] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 44cc8606-24f5-4f6b-b96f-3559c9c3f06e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245500] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 9249f27a-1985-4be1-947c-e433c7aa26f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.245500] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance b48a8e83-e581-4886-833b-bbce155d40d9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1422.256867] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773792, 'name': Rename_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.366202] env[63371]: DEBUG nova.network.neutron [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Successfully created port: 9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1422.395182] env[63371]: DEBUG nova.network.neutron [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Successfully updated port: 292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1422.633738] env[63371]: DEBUG oslo_vmware.api [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773790, 'name': PowerOnVM_Task, 'duration_secs': 0.613048} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.634020] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1422.634229] env[63371]: INFO nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Took 10.26 seconds to spawn the instance on the hypervisor. 
[ 1422.634507] env[63371]: DEBUG nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1422.635241] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8eaa38-087c-4fb5-99b8-5f0262bcd30e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.725360] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1422.754175] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e8bd5802-d2ff-4348-92d4-c23277f4eaeb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1422.765035] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773792, 'name': Rename_Task, 'duration_secs': 0.248461} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1422.765035] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1422.765035] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da99f82f-89d6-4f5a-b3a6-1e54a02715d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.769292] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1422.769681] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1422.770076] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1422.771082] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1422.771539] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1422.771850] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1422.772316] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 
tempest-ServerRescueTestJSON-1718412976-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1422.773021] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1422.773021] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1422.773021] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1422.773585] env[63371]: DEBUG nova.virt.hardware [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1422.775352] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e71e7a-2559-4cc3-b7e3-200f8633f5fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.787674] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696b2b16-2fdf-4850-a08a-64e3666a6ee7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1422.792623] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1422.792623] env[63371]: value = "task-1773793" [ 1422.792623] env[63371]: _type = "Task" [ 1422.792623] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1422.809816] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773793, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1422.900648] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "refresh_cache-9249f27a-1985-4be1-947c-e433c7aa26f1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1422.900648] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquired lock "refresh_cache-9249f27a-1985-4be1-947c-e433c7aa26f1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1422.900648] env[63371]: DEBUG nova.network.neutron [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1423.157585] env[63371]: INFO nova.compute.manager [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Took 45.52 seconds to build instance. [ 1423.257395] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance b5e259ea-d103-41c6-84b3-748813bb514d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1423.310913] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773793, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1423.433134] env[63371]: DEBUG nova.network.neutron [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1423.659966] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d54097ed-e8de-4d1b-8cef-4ee0f9417513 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.573s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.706347] env[63371]: DEBUG nova.network.neutron [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Updating instance_info_cache with network_info: [{"id": "292614c6-49c4-4096-afda-debce88edee1", "address": "fa:16:3e:0d:77:2d", "network": {"id": "d9cab695-43ca-4b82-aeb3-d16f79b8cf36", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-890050549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bda622ab1474b76a46a5ba68977188b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap292614c6-49", "ovs_interfaceid": "292614c6-49c4-4096-afda-debce88edee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1423.760994] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance fb2ddd3e-7adc-4a34-8797-0e98fdf19379 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1423.795023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.795023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.795023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1423.795515] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1423.797136] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1423.803741] env[63371]: INFO nova.compute.manager [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Terminating instance [ 1423.806260] env[63371]: DEBUG nova.compute.manager [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1423.806469] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1423.807272] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432e22e5-2e28-4051-9fc1-d370bc9dceab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.813556] env[63371]: DEBUG oslo_vmware.api [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773793, 'name': PowerOnVM_Task, 'duration_secs': 0.842866} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1423.814205] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1423.814353] env[63371]: INFO nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Took 8.73 seconds to spawn the instance on the hypervisor. 
[ 1423.814531] env[63371]: DEBUG nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1423.815307] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20fb0a01-4b88-45e6-b83d-1fd1a4ab93aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.821349] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1423.821819] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-753c5e93-f241-4f53-93dc-f70678e69197 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1423.999506] env[63371]: DEBUG nova.compute.manager [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received event network-changed-d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1423.999704] env[63371]: DEBUG nova.compute.manager [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Refreshing instance network info cache due to event network-changed-d92b8632-8794-486c-a8eb-5c8844009035. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1423.999933] env[63371]: DEBUG oslo_concurrency.lockutils [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] Acquiring lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.001053] env[63371]: DEBUG oslo_concurrency.lockutils [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] Acquired lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.001053] env[63371]: DEBUG nova.network.neutron [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Refreshing network info cache for port d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1424.167018] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1424.209209] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Releasing lock "refresh_cache-9249f27a-1985-4be1-947c-e433c7aa26f1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1424.209547] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Instance network_info: |[{"id": "292614c6-49c4-4096-afda-debce88edee1", "address": "fa:16:3e:0d:77:2d", "network": {"id": "d9cab695-43ca-4b82-aeb3-d16f79b8cf36", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-890050549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bda622ab1474b76a46a5ba68977188b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap292614c6-49", "ovs_interfaceid": "292614c6-49c4-4096-afda-debce88edee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1424.210191] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:77:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cca1f087-01e1-49ca-831b-5c51478a5d60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '292614c6-49c4-4096-afda-debce88edee1', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1424.218861] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Creating folder: Project (1bda622ab1474b76a46a5ba68977188b). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1424.219507] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7f5839f-ca3a-4f19-9a9b-ee4a55528a0c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.231383] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Created folder: Project (1bda622ab1474b76a46a5ba68977188b) in parent group-v368199. [ 1424.231579] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Creating folder: Instances. Parent ref: group-v368287. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1424.231899] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7556689b-ae67-41ff-bee9-6cd00e6fa646 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.246459] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Created folder: Instances in parent group-v368287. [ 1424.246703] env[63371]: DEBUG oslo.service.loopingcall [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1424.246890] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1424.247122] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85bc444f-12f0-4cf4-b52c-0dc6fe3b1326 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.266103] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 36b81143-211f-4c77-854b-abe0d3f39ce4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1424.273172] env[63371]: DEBUG nova.compute.manager [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Received event network-changed-292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1424.273382] env[63371]: DEBUG nova.compute.manager [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Refreshing instance network info cache due to event network-changed-292614c6-49c4-4096-afda-debce88edee1. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1424.273602] env[63371]: DEBUG oslo_concurrency.lockutils [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] Acquiring lock "refresh_cache-9249f27a-1985-4be1-947c-e433c7aa26f1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.273744] env[63371]: DEBUG oslo_concurrency.lockutils [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] Acquired lock "refresh_cache-9249f27a-1985-4be1-947c-e433c7aa26f1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.273902] env[63371]: DEBUG nova.network.neutron [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Refreshing network info cache for port 292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1424.283041] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1424.283041] env[63371]: value = "task-1773797" [ 1424.283041] env[63371]: _type = "Task" [ 1424.283041] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.288341] env[63371]: DEBUG nova.network.neutron [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Successfully updated port: 9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1424.292962] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773797, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.338304] env[63371]: INFO nova.compute.manager [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Took 41.98 seconds to build instance. 
[ 1424.467550] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1424.467824] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1424.467995] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Deleting the datastore file [datastore1] 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1424.468296] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-40462810-838d-4119-adbf-7fdd774de0dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.475310] env[63371]: DEBUG oslo_vmware.api [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1424.475310] env[63371]: value = "task-1773798" [ 1424.475310] env[63371]: _type = "Task" [ 1424.475310] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.485078] env[63371]: DEBUG oslo_vmware.api [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773798, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.660092] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52202cc9-4d47-68c7-6e3e-d1d1ad1701e0/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1424.661094] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e0f192-9ea7-4bdb-8684-9caa253e6f19 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.668028] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52202cc9-4d47-68c7-6e3e-d1d1ad1701e0/disk-0.vmdk is in state: ready. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1424.668211] env[63371]: ERROR oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52202cc9-4d47-68c7-6e3e-d1d1ad1701e0/disk-0.vmdk due to incomplete transfer. [ 1424.668466] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8003cc65-8bcd-4c5e-b7b1-74c9aad55df8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.684529] env[63371]: DEBUG oslo_vmware.rw_handles [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52202cc9-4d47-68c7-6e3e-d1d1ad1701e0/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1424.684850] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Uploaded image 67c79a69-90fa-469e-b65b-470387ba8d71 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1424.687350] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1424.687600] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-02d8f18f-80d2-449b-9816-9f469e7a7ac6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.694586] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1424.694586] env[63371]: value = "task-1773799" [ 1424.694586] env[63371]: _type = "Task" [ 1424.694586] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.698814] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1424.706081] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773799, 'name': Destroy_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.770569] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 855005ae-3b0e-4ad7-80cf-266075fc6d0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1424.792313] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773797, 'name': CreateVM_Task, 'duration_secs': 0.468492} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.793552] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1424.793856] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.794226] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.794473] env[63371]: DEBUG nova.network.neutron [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1424.796109] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1424.796532] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1424.796844] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1424.801023] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-6656c820-0041-40b2-8e74-a8acebd2a4b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1424.803520] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1424.803520] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]529417e7-944c-8e33-c92e-8ec1129733f3" [ 1424.803520] env[63371]: _type = "Task" [ 1424.803520] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1424.813366] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529417e7-944c-8e33-c92e-8ec1129733f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1424.840257] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4da0bbd3-d1c5-4423-85e7-d83d08d868e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 79.073s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1424.854419] env[63371]: DEBUG nova.network.neutron [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updated VIF entry in instance network info cache for port d92b8632-8794-486c-a8eb-5c8844009035. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1424.855597] env[63371]: DEBUG nova.network.neutron [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1424.990447] env[63371]: DEBUG oslo_vmware.api [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773798, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.243674} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1424.990837] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1424.991124] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1424.991340] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1424.991529] env[63371]: INFO nova.compute.manager [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 1424.991794] env[63371]: DEBUG oslo.service.loopingcall [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1424.992024] env[63371]: DEBUG nova.compute.manager [-] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1424.992123] env[63371]: DEBUG nova.network.neutron [-] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1425.130756] env[63371]: DEBUG nova.network.neutron [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Updated VIF entry in instance network info cache for port 292614c6-49c4-4096-afda-debce88edee1. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1425.131209] env[63371]: DEBUG nova.network.neutron [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Updating instance_info_cache with network_info: [{"id": "292614c6-49c4-4096-afda-debce88edee1", "address": "fa:16:3e:0d:77:2d", "network": {"id": "d9cab695-43ca-4b82-aeb3-d16f79b8cf36", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-890050549-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1bda622ab1474b76a46a5ba68977188b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cca1f087-01e1-49ca-831b-5c51478a5d60", "external-id": "nsx-vlan-transportzone-439", "segmentation_id": 439, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap292614c6-49", "ovs_interfaceid": "292614c6-49c4-4096-afda-debce88edee1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.205144] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773799, 'name': Destroy_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.277354] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 50d5eac1-0752-4089-948c-b04439df6f6c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1425.314529] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529417e7-944c-8e33-c92e-8ec1129733f3, 'name': SearchDatastore_Task, 'duration_secs': 0.075508} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.314802] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.315060] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1425.315744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1425.315744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1425.315744] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1425.316407] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbaa012b-239d-4f9d-958a-0770e418aed3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.332648] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1425.332830] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1425.333645] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b7d40b0-9df3-4aa9-98c4-d5869697d31c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.339199] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1425.339199] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5265edce-116e-6dae-e6b7-079b6b77b768" [ 1425.339199] env[63371]: _type = "Task" [ 1425.339199] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.349024] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1425.349934] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5265edce-116e-6dae-e6b7-079b6b77b768, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.358764] env[63371]: DEBUG oslo_concurrency.lockutils [req-eba9753d-66ed-46ac-8c15-9cb81b8ceab6 req-d4e37073-79ce-41ff-af28-59cf11248a05 service nova] Releasing lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.362604] env[63371]: DEBUG nova.network.neutron [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1425.512115] env[63371]: DEBUG nova.network.neutron [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Updating instance_info_cache with network_info: [{"id": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "address": "fa:16:3e:66:5e:af", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa089f3-fe", "ovs_interfaceid": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.634095] env[63371]: DEBUG oslo_concurrency.lockutils [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] Releasing lock "refresh_cache-9249f27a-1985-4be1-947c-e433c7aa26f1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1425.634459] env[63371]: DEBUG nova.compute.manager [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Received event network-vif-plugged-9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1425.634659] env[63371]: DEBUG oslo_concurrency.lockutils [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] Acquiring lock "b48a8e83-e581-4886-833b-bbce155d40d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1425.634867] env[63371]: DEBUG oslo_concurrency.lockutils [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] Lock "b48a8e83-e581-4886-833b-bbce155d40d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1425.635039] env[63371]: DEBUG oslo_concurrency.lockutils [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] Lock "b48a8e83-e581-4886-833b-bbce155d40d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1425.635289] 
env[63371]: DEBUG nova.compute.manager [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] No waiting events found dispatching network-vif-plugged-9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1425.635378] env[63371]: WARNING nova.compute.manager [req-198230de-320f-4ff1-86b0-44e4c837cda7 req-8ae883e6-6d17-41b7-a0e5-058961f5b030 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Received unexpected event network-vif-plugged-9aa089f3-fe69-452e-b5e4-4daac745b9bb for instance with vm_state building and task_state spawning. [ 1425.704660] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773799, 'name': Destroy_Task, 'duration_secs': 0.884637} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.704975] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Destroyed the VM [ 1425.705299] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1425.705558] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-364ba79c-f718-4b7f-82c3-e1522782dc43 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.711967] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1425.711967] env[63371]: value = "task-1773800" [ 1425.711967] env[63371]: _type = "Task" [ 1425.711967] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.721527] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773800, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.780952] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance be37eb1c-8582-4446-afd6-ae11a8cadf95 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1425.854405] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5265edce-116e-6dae-e6b7-079b6b77b768, 'name': SearchDatastore_Task, 'duration_secs': 0.030781} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1425.857535] env[63371]: DEBUG nova.network.neutron [-] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1425.858963] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a424979-97cc-4ca3-b148-cea690337dd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1425.865855] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1425.865855] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5289ae37-6385-bdae-22d2-95389621d005" [ 1425.865855] env[63371]: _type = "Task" [ 1425.865855] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1425.873700] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5289ae37-6385-bdae-22d2-95389621d005, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1425.877560] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.014577] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.014957] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Instance network_info: |[{"id": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "address": "fa:16:3e:66:5e:af", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa089f3-fe", "ovs_interfaceid": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1426.016029] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:5e:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3ff3baee-99ce-4b51-ae98-efc6163aaab3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9aa089f3-fe69-452e-b5e4-4daac745b9bb', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1426.023598] env[63371]: DEBUG oslo.service.loopingcall [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1426.023817] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1426.024967] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-935e0699-eeb1-41ac-892a-71a08d56b43f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.043018] env[63371]: DEBUG nova.compute.manager [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Received event network-changed-b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1426.043177] env[63371]: DEBUG nova.compute.manager [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Refreshing instance network info cache due to event network-changed-b03ddfde-3b36-43a8-8c6a-00cd704bce22. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1426.043382] env[63371]: DEBUG oslo_concurrency.lockutils [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] Acquiring lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.043521] env[63371]: DEBUG oslo_concurrency.lockutils [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] Acquired lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.043676] env[63371]: DEBUG nova.network.neutron [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Refreshing network info cache for port b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1426.047250] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1426.047250] env[63371]: value = "task-1773801" [ 1426.047250] env[63371]: _type = "Task" [ 1426.047250] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.055186] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773801, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.225984] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773800, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.283978] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 64fc862c-a755-4cac-997b-7a8328638269 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1426.287713] env[63371]: DEBUG nova.compute.manager [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Received event network-changed-9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1426.287950] env[63371]: DEBUG nova.compute.manager [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Refreshing instance network info cache due to event network-changed-9aa089f3-fe69-452e-b5e4-4daac745b9bb. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1426.288180] env[63371]: DEBUG oslo_concurrency.lockutils [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] Acquiring lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.288367] env[63371]: DEBUG oslo_concurrency.lockutils [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] Acquired lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.288491] env[63371]: DEBUG nova.network.neutron [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Refreshing network info cache for port 9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1426.290131] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.290212] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.290699] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock 
"ca53accc-a15f-4503-87e5-7cbf3e2c0b43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.290699] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1426.290794] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1426.294844] env[63371]: INFO nova.compute.manager [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Terminating instance [ 1426.296882] env[63371]: DEBUG nova.compute.manager [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1426.297102] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1426.298819] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d15d7d-9b2b-468b-af55-53fa962263b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.307752] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1426.308010] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8324933-991e-4e8f-9c63-6398167d80f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.315451] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1426.315451] env[63371]: value = "task-1773802" [ 1426.315451] env[63371]: _type = "Task" [ 1426.315451] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.324316] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773802, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.363422] env[63371]: INFO nova.compute.manager [-] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Took 1.37 seconds to deallocate network for instance. [ 1426.381650] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5289ae37-6385-bdae-22d2-95389621d005, 'name': SearchDatastore_Task, 'duration_secs': 0.043757} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.381785] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1426.382247] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9249f27a-1985-4be1-947c-e433c7aa26f1/9249f27a-1985-4be1-947c-e433c7aa26f1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1426.383048] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-252a58f8-6bba-4527-92f4-f277629c3929 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.390076] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1426.390076] env[63371]: value = "task-1773803" [ 1426.390076] env[63371]: _type = "Task" [ 1426.390076] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.403808] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773803, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.559094] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773801, 'name': CreateVM_Task, 'duration_secs': 0.504633} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.559286] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1426.560040] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1426.560224] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1426.560560] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1426.560823] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48dbf780-3fad-4c75-aa73-e5d9f87d7e31 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.567027] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1426.567027] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5241a8c5-56c7-64cf-7b53-0e7d5b01e663" [ 1426.567027] env[63371]: _type = "Task" [ 1426.567027] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.573674] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5241a8c5-56c7-64cf-7b53-0e7d5b01e663, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.731457] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773800, 'name': RemoveSnapshot_Task} progress is 50%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.790988] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance dcf8063b-56eb-439c-bee5-139a1e157714 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1426.831569] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773802, 'name': PowerOffVM_Task, 'duration_secs': 0.216507} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1426.831875] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1426.832062] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1426.832346] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-792d0aff-d04e-4543-8846-b3c0aa20fb2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.878163] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1426.902177] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773803, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1426.903398] env[63371]: DEBUG nova.network.neutron [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updated VIF entry in instance network info cache for port b03ddfde-3b36-43a8-8c6a-00cd704bce22. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1426.903880] env[63371]: DEBUG nova.network.neutron [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updating instance_info_cache with network_info: [{"id": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "address": "fa:16:3e:b7:8f:81", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb03ddfde-3b", "ovs_interfaceid": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1426.907511] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1426.907945] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1426.908261] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Deleting the datastore file [datastore1] ca53accc-a15f-4503-87e5-7cbf3e2c0b43 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1426.910798] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d27ee27-de74-4b52-a0ee-7b54578f78ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1426.917153] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for the task: (returnval){ [ 1426.917153] env[63371]: value = "task-1773805" [ 1426.917153] env[63371]: _type = "Task" [ 1426.917153] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1426.925904] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773805, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.079017] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5241a8c5-56c7-64cf-7b53-0e7d5b01e663, 'name': SearchDatastore_Task, 'duration_secs': 0.008809} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.079017] env[63371]: DEBUG nova.network.neutron [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Updated VIF entry in instance network info cache for port 9aa089f3-fe69-452e-b5e4-4daac745b9bb. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1427.079194] env[63371]: DEBUG nova.network.neutron [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Updating instance_info_cache with network_info: [{"id": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "address": "fa:16:3e:66:5e:af", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa089f3-fe", "ovs_interfaceid": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1427.079653] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.079850] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1427.080153] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1427.080337] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1427.080554] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1427.080819] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd96bf83-f510-4606-95a3-31f8430f1aae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.089746] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1427.089915] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1427.091264] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65a71bb1-fefb-4f2a-8a70-43e9728cd37a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.101134] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1427.101134] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c3bdf2-26bd-97f1-c4f7-b2222ce84aed" [ 1427.101134] env[63371]: _type = "Task" [ 1427.101134] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.110348] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c3bdf2-26bd-97f1-c4f7-b2222ce84aed, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.224023] env[63371]: DEBUG oslo_vmware.api [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773800, 'name': RemoveSnapshot_Task, 'duration_secs': 1.195948} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.224023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1427.224152] env[63371]: INFO nova.compute.manager [None req-31b63560-4bf6-4801-9e01-aff6385941d9 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Took 18.73 seconds to snapshot the instance on the hypervisor. [ 1427.301633] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1427.402028] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773803, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522892} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.402330] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9249f27a-1985-4be1-947c-e433c7aa26f1/9249f27a-1985-4be1-947c-e433c7aa26f1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1427.402575] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1427.402824] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f82928b-8aa3-4ca9-80dd-f7b856bb8419 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.406312] env[63371]: DEBUG oslo_concurrency.lockutils [req-54f6d11b-59fe-4fb4-8cf1-531bfbad99c9 req-4867b93d-7b2e-4c73-8441-20e876314754 service nova] Releasing lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.409036] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1427.409036] env[63371]: value = "task-1773806" [ 1427.409036] env[63371]: _type = "Task" [ 1427.409036] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.416462] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773806, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.426472] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773805, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.584360] env[63371]: DEBUG oslo_concurrency.lockutils [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] Releasing lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1427.584630] env[63371]: DEBUG nova.compute.manager [req-94ce0cdd-449a-4f3f-8891-ccb3fc6cd455 req-0f4a3588-13f4-4694-8265-98cdcffcf292 service nova] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Received event network-vif-deleted-1b8eba67-08ba-47de-bad7-2e38e4a7ea31 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1427.612094] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c3bdf2-26bd-97f1-c4f7-b2222ce84aed, 'name': SearchDatastore_Task, 'duration_secs': 0.011624} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.612991] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0afef573-8665-491d-be1d-81db70ffb883 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.618346] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1427.618346] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52650b37-7c8c-a468-c5d1-2e5f4efd5499" [ 1427.618346] env[63371]: _type = "Task" [ 1427.618346] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.626285] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52650b37-7c8c-a468-c5d1-2e5f4efd5499, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1427.804269] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 7e66011a-4fed-471f-82ea-e1016f92ad39 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1427.918741] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773806, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071588} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.921890] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1427.922647] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a46c6a5d-6ed7-493f-bd7c-c0977011d2ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.930179] env[63371]: DEBUG oslo_vmware.api [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Task: {'id': task-1773805, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.826538} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1427.939330] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1427.939560] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1427.939791] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1427.940013] env[63371]: INFO nova.compute.manager [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1427.940349] env[63371]: DEBUG oslo.service.loopingcall [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1427.949220] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 9249f27a-1985-4be1-947c-e433c7aa26f1/9249f27a-1985-4be1-947c-e433c7aa26f1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1427.949509] env[63371]: DEBUG nova.compute.manager [-] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1427.949605] env[63371]: DEBUG nova.network.neutron [-] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1427.951648] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e14d557e-b3d8-4b23-912f-86784c37632f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1427.974273] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1427.974273] env[63371]: value = "task-1773807" [ 1427.974273] env[63371]: _type = "Task" [ 1427.974273] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1427.982575] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773807, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.133115] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52650b37-7c8c-a468-c5d1-2e5f4efd5499, 'name': SearchDatastore_Task, 'duration_secs': 0.086047} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.133392] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1428.133644] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/b48a8e83-e581-4886-833b-bbce155d40d9.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1428.133903] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd357576-e60c-4e66-9622-939c7b851cc6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.143468] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1428.143468] env[63371]: value = "task-1773808" [ 1428.143468] env[63371]: _type = "Task" [ 1428.143468] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.153061] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.308022] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e6cd62ce-f6d2-4e5b-acbc-7527a94e0932 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1428.486850] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773807, 'name': ReconfigVM_Task, 'duration_secs': 0.372419} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1428.487169] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 9249f27a-1985-4be1-947c-e433c7aa26f1/9249f27a-1985-4be1-947c-e433c7aa26f1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1428.488146] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-90f001b1-72ec-41d9-b2ca-e666af3b39ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1428.496846] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1428.496846] env[63371]: value = "task-1773809" [ 1428.496846] env[63371]: _type = "Task" [ 1428.496846] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1428.507979] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773809, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.655839] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773808, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1428.811586] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 594ff846-8e3e-4882-8ddc-41f824a77a5c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1428.930187] env[63371]: DEBUG nova.compute.manager [req-c9a0e995-19bf-42fe-9edf-e6744c3d8057 req-2ea8f5e8-1114-4055-8f6e-a0341cbb6826 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Received event network-vif-deleted-9a4b63df-9697-47a1-81ad-c69476a80975 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1428.930187] env[63371]: INFO nova.compute.manager [req-c9a0e995-19bf-42fe-9edf-e6744c3d8057 req-2ea8f5e8-1114-4055-8f6e-a0341cbb6826 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Neutron deleted interface 9a4b63df-9697-47a1-81ad-c69476a80975; detaching it from the instance and deleting it from the info cache [ 1428.930187] env[63371]: DEBUG nova.network.neutron [req-c9a0e995-19bf-42fe-9edf-e6744c3d8057 req-2ea8f5e8-1114-4055-8f6e-a0341cbb6826 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.006868] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773809, 'name': Rename_Task, 'duration_secs': 0.25852} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.007101] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1429.007348] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17b39a8a-01c7-4a04-982d-1b1b4049b783 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.013659] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1429.013659] env[63371]: value = "task-1773810" [ 1429.013659] env[63371]: _type = "Task" [ 1429.013659] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.023113] env[63371]: DEBUG nova.network.neutron [-] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1429.027813] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773810, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.155944] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.58773} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.156222] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/b48a8e83-e581-4886-833b-bbce155d40d9.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1429.156432] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1429.156679] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e25c3f0f-6849-46bb-b5dc-b9927224733d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.164585] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1429.164585] env[63371]: value = "task-1773811" [ 1429.164585] env[63371]: _type = "Task" [ 1429.164585] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.176200] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773811, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.241254] env[63371]: DEBUG nova.compute.manager [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1429.242170] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ed36cc-f055-4b80-a129-ea9700c414a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.315291] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance b523486c-adae-4322-80be-1f3bf33ca192 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1429.435922] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0e9af905-e91a-4a80-92f1-4133f37a19bd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.446254] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3e43db-1d66-4d62-80f5-f90521d28325 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.488537] env[63371]: DEBUG nova.compute.manager [req-c9a0e995-19bf-42fe-9edf-e6744c3d8057 req-2ea8f5e8-1114-4055-8f6e-a0341cbb6826 service nova] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Detach interface failed, port_id=9a4b63df-9697-47a1-81ad-c69476a80975, reason: Instance ca53accc-a15f-4503-87e5-7cbf3e2c0b43 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1429.523203] env[63371]: DEBUG oslo_vmware.api [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773810, 'name': PowerOnVM_Task, 'duration_secs': 0.500304} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.523484] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1429.523678] env[63371]: INFO nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Took 7.93 seconds to spawn the instance on the hypervisor. [ 1429.523917] env[63371]: DEBUG nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1429.524680] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a282db-2f3a-4447-9127-3465da6312b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.528805] env[63371]: INFO nova.compute.manager [-] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Took 1.58 seconds to deallocate network for instance. [ 1429.675318] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773811, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070698} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1429.675599] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1429.676370] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19fdefb7-223a-4612-94dc-f8bbe0b5aa80 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.698472] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/b48a8e83-e581-4886-833b-bbce155d40d9.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1429.698872] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a76d851-d4a9-41e8-8c2e-58e241f408fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.717975] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1429.717975] env[63371]: value = "task-1773812" [ 1429.717975] env[63371]: _type = "Task" [ 1429.717975] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1429.725779] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773812, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1429.753102] env[63371]: INFO nova.compute.manager [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] instance snapshotting [ 1429.755796] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a85650-4de6-48c1-9d93-5709e6d647f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.776088] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37cccf12-c23c-46b4-9969-8c91401322e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1429.818165] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance f8119ade-7018-4ad8-82fe-baa0a6753c64 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1430.042103] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.044604] env[63371]: INFO nova.compute.manager [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Took 41.73 seconds to build instance. [ 1430.228401] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773812, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.287936] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1430.288407] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-05d380b6-4aee-429e-ba1d-0424503f2216 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.295898] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1430.295898] env[63371]: value = "task-1773813" [ 1430.295898] env[63371]: _type = "Task" [ 1430.295898] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.304614] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773813, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.320748] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1430.321150] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 18 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1430.321223] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4032MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=18 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1430.547591] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d693c636-c94c-48a3-8dad-9c7da290f07b tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.950s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.607143] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "9249f27a-1985-4be1-947c-e433c7aa26f1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.607508] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.607823] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "9249f27a-1985-4be1-947c-e433c7aa26f1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1430.608082] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1430.608305] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1430.610764] 
env[63371]: INFO nova.compute.manager [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Terminating instance [ 1430.612913] env[63371]: DEBUG nova.compute.manager [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1430.613173] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1430.614210] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5eb2fb-cf95-46be-8dba-c1bda5bb8eb6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.625011] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1430.625011] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e07793a-174a-49f6-8880-2c544d7b15c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.633706] env[63371]: DEBUG oslo_vmware.api [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1430.633706] env[63371]: value = "task-1773814" [ 1430.633706] env[63371]: _type = "Task" [ 1430.633706] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.642619] env[63371]: DEBUG oslo_vmware.api [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773814, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.730737] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773812, 'name': ReconfigVM_Task, 'duration_secs': 0.580319} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1430.731044] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Reconfigured VM instance instance-00000021 to attach disk [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/b48a8e83-e581-4886-833b-bbce155d40d9.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1430.731702] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30042906-4bfb-4f7e-aaf3-430a16d4c92f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.737801] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1430.737801] env[63371]: value = "task-1773815" [ 1430.737801] env[63371]: _type = "Task" [ 1430.737801] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1430.748928] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773815, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.805825] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773813, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1430.845806] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e8ca50c-9537-462c-9dcd-ed4459c63f3e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.855331] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783ae952-2466-435a-8294-c8f7d4f1ee51 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.888297] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af922a8-65ee-4225-8ea8-448e542f317c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.896474] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0754d8f2-d367-40f1-88f3-40838d2f6c4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1430.911936] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1431.050747] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1431.144320] env[63371]: DEBUG oslo_vmware.api [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773814, 'name': PowerOffVM_Task, 'duration_secs': 0.184391} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.144592] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1431.144761] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1431.145013] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3f2fae52-4aa4-455e-826f-db8199aa4e41 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.223115] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1431.223389] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1431.223575] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Deleting the datastore file [datastore1] 9249f27a-1985-4be1-947c-e433c7aa26f1 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1431.223837] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c741e09-f63e-44f6-a4d2-bcf5a4150629 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.230659] env[63371]: DEBUG oslo_vmware.api [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for the task: (returnval){ [ 1431.230659] env[63371]: value = "task-1773817" [ 1431.230659] env[63371]: _type = "Task" [ 1431.230659] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.238551] env[63371]: DEBUG oslo_vmware.api [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773817, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.246684] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773815, 'name': Rename_Task, 'duration_secs': 0.133162} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.246947] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1431.247201] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea562062-0fe3-4196-8bcd-b8dc9616178d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.254248] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1431.254248] env[63371]: value = "task-1773818" [ 1431.254248] env[63371]: _type = "Task" [ 1431.254248] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.268253] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773818, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.309508] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773813, 'name': CreateSnapshot_Task, 'duration_secs': 0.646305} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.310221] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1431.310626] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9da2ab5-77f7-418f-85f1-f478d654f290 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.415197] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1431.574907] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1431.743291] env[63371]: DEBUG oslo_vmware.api [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Task: {'id': task-1773817, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.415705} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1431.743706] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1431.744010] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1431.744307] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1431.744576] env[63371]: INFO nova.compute.manager [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1431.744943] env[63371]: DEBUG oslo.service.loopingcall [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1431.745247] env[63371]: DEBUG nova.compute.manager [-] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1431.745399] env[63371]: DEBUG nova.network.neutron [-] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1431.764198] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773818, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.829176] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1431.829511] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-284e07f6-9f27-4ee5-b038-cc1f957a7ef0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1431.838811] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1431.838811] env[63371]: value = "task-1773819" [ 1431.838811] env[63371]: _type = "Task" [ 1431.838811] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1431.847699] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773819, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1431.920764] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1431.921230] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 10.747s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1431.921506] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.931s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1431.923185] env[63371]: INFO nova.compute.claims [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1431.925994] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1431.926300] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 
1432.045364] env[63371]: DEBUG nova.compute.manager [req-8247e12c-bd41-4ad4-a549-ad1e88b4129d req-4f7666c7-58d0-4b1f-8292-a313dda84498 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Received event network-vif-deleted-292614c6-49c4-4096-afda-debce88edee1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1432.046292] env[63371]: INFO nova.compute.manager [req-8247e12c-bd41-4ad4-a549-ad1e88b4129d req-4f7666c7-58d0-4b1f-8292-a313dda84498 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Neutron deleted interface 292614c6-49c4-4096-afda-debce88edee1; detaching it from the instance and deleting it from the info cache [ 1432.046517] env[63371]: DEBUG nova.network.neutron [req-8247e12c-bd41-4ad4-a549-ad1e88b4129d req-4f7666c7-58d0-4b1f-8292-a313dda84498 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.264869] env[63371]: DEBUG oslo_vmware.api [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773818, 'name': PowerOnVM_Task, 'duration_secs': 0.589685} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1432.265115] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1432.265322] env[63371]: INFO nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Took 9.54 seconds to spawn the instance on the hypervisor. [ 1432.265500] env[63371]: DEBUG nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1432.266675] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f6432ad-713d-4411-b3c0-e77b29977e61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.349534] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773819, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.433227] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] There are 10 instances to clean {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1432.433490] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 7841ebd2-0c23-4e32-8b81-42311a32c6fd] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1432.524904] env[63371]: DEBUG nova.network.neutron [-] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1432.549345] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7da72c05-8059-42af-b458-c321d2f4be20 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.558738] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43eb4649-ef1f-4e9d-a995-1b9dfb6cf3b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1432.593373] env[63371]: DEBUG nova.compute.manager [req-8247e12c-bd41-4ad4-a549-ad1e88b4129d req-4f7666c7-58d0-4b1f-8292-a313dda84498 service nova] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Detach interface failed, port_id=292614c6-49c4-4096-afda-debce88edee1, reason: Instance 9249f27a-1985-4be1-947c-e433c7aa26f1 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1432.783212] env[63371]: INFO nova.compute.manager [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Took 43.19 seconds to build instance. [ 1432.850550] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773819, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1432.939614] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 1924d3d2-cc88-4fd2-b509-8463da796658] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1433.028247] env[63371]: INFO nova.compute.manager [-] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Took 1.28 seconds to deallocate network for instance. 
[ 1433.284744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b86ac1af-2e9d-43f6-93a1-d2c282de6d5f tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "b48a8e83-e581-4886-833b-bbce155d40d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.558s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1433.349506] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773819, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1433.391898] env[63371]: INFO nova.compute.manager [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Rescuing [ 1433.392180] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1433.392341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1433.392501] env[63371]: DEBUG nova.network.neutron [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1433.418440] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064599b0-3006-42e7-ba00-e25b7c03faaf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.427373] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7194cb25-b9db-4012-93d8-b877e33af114 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.457039] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 362d8303-524a-457a-b8d9-2bad87fa816b] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1433.459561] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc245636-d5fe-4e96-8d56-2e342ce0b356 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.467041] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b69ed3-1ed6-4bbb-b39c-9288f3c8c816 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.480375] env[63371]: DEBUG nova.compute.provider_tree [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1433.534965] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1433.789526] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1433.850869] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773819, 'name': CloneVM_Task, 'duration_secs': 1.645343} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1433.851170] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Created linked-clone VM from snapshot [ 1433.851926] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53084246-d033-4002-b28c-98f274da9a86 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.859448] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Uploading image 40f4ba8c-3aff-4162-89c5-27a0765d4f79 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1433.881805] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1433.881805] env[63371]: value = "vm-368292" [ 1433.881805] env[63371]: _type = "VirtualMachine" [ 1433.881805] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1433.882101] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d46c4c5e-98e9-44d3-9db6-f4283fb92981 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1433.889621] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lease: (returnval){ [ 1433.889621] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce5356-d67c-7faa-3f7a-0b8f4cf702c0" [ 1433.889621] env[63371]: _type = "HttpNfcLease" [ 1433.889621] env[63371]: } obtained for exporting VM: (result){ [ 1433.889621] env[63371]: value = "vm-368292" [ 1433.889621] env[63371]: _type = "VirtualMachine" [ 1433.889621] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1433.889621] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the lease: (returnval){ [ 1433.889621] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce5356-d67c-7faa-3f7a-0b8f4cf702c0" [ 1433.889621] env[63371]: _type = "HttpNfcLease" [ 1433.889621] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1433.897604] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1433.897604] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce5356-d67c-7faa-3f7a-0b8f4cf702c0" [ 1433.897604] env[63371]: _type = "HttpNfcLease" [ 1433.897604] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1433.963351] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 4d9d43a2-6ed4-4912-a1cc-1c0af360bd5d] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1433.984585] env[63371]: DEBUG nova.scheduler.client.report [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1434.121923] env[63371]: DEBUG nova.network.neutron [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Updating instance_info_cache with network_info: [{"id": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "address": "fa:16:3e:66:5e:af", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa089f3-fe", "ovs_interfaceid": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1434.313735] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1434.398289] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1434.398289] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce5356-d67c-7faa-3f7a-0b8f4cf702c0" [ 1434.398289] env[63371]: _type = "HttpNfcLease" [ 1434.398289] env[63371]: } is ready. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1434.398652] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1434.398652] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce5356-d67c-7faa-3f7a-0b8f4cf702c0" [ 1434.398652] env[63371]: _type = "HttpNfcLease" [ 1434.398652] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1434.399521] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2494d5-12ab-4db8-8dba-1e80efc6b51c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.407756] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fcfe08-d5f4-e03c-b80c-0dfc276adab7/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1434.407986] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fcfe08-d5f4-e03c-b80c-0dfc276adab7/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1434.466901] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: d9523239-79d1-434f-977a-e1f0e358c82b] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1434.491504] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.492049] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1434.495416] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.467s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.495597] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1434.498083] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.728s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1434.499542] env[63371]: INFO nova.compute.claims [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1434.509775] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-22cde28f-ef68-4632-ad6f-b46248d14cf7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1434.522818] env[63371]: INFO nova.scheduler.client.report [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Deleted allocations for instance af1281ba-c3be-43b4-a039-86d94bd9efe4 [ 1434.624960] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1434.971901] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: fc0715a1-a056-4a1b-a86e-959680effc97] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1435.004107] env[63371]: DEBUG nova.compute.utils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1435.007728] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1435.007899] env[63371]: DEBUG nova.network.neutron [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1435.034943] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ddbe4ea8-321c-4542-b5d4-6603c0606993 tempest-AttachInterfacesV270Test-760437314 tempest-AttachInterfacesV270Test-760437314-project-member] Lock "af1281ba-c3be-43b4-a039-86d94bd9efe4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.476s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1435.049237] env[63371]: DEBUG nova.policy [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '338e5dcf03fa465484fbc3c9cf1ccd83', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '941e719c77a84e8d8fe0107968a0f527', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1435.163747] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1435.163747] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-217825c4-82ab-4bf1-8890-ffcae84a113f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.172217] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1435.172217] env[63371]: value = "task-1773821" [ 1435.172217] env[63371]: _type = "Task" [ 1435.172217] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.181841] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773821, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.474752] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ca202079-2eae-441e-80f6-e403497e137d] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1435.514357] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1435.583911] env[63371]: DEBUG nova.network.neutron [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Successfully created port: 1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1435.683443] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773821, 'name': PowerOffVM_Task, 'duration_secs': 0.200102} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1435.686243] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1435.687297] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4754b9c1-e9ce-4f45-907e-9de88bdaebd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.716333] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac8cfdb9-d647-49bf-83a4-07b30b35c896 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.754156] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1435.756154] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e7806ea-0d7f-414a-a1f6-48cda3319b7f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.762773] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1435.762773] env[63371]: value = "task-1773822" [ 1435.762773] env[63371]: _type = "Task" [ 1435.762773] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.775486] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1435.775793] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1435.776674] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1435.776674] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1435.776674] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1435.776674] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0eb9c6e8-560a-4099-8f35-b68dc8290b18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.788271] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1435.788496] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1435.789438] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b714b56-8b65-4ef9-9c31-0089755c509e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1435.795296] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1435.795296] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5270ea18-d71f-b9f0-d072-64731040972c" [ 1435.795296] env[63371]: _type = "Task" [ 1435.795296] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1435.809176] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5270ea18-d71f-b9f0-d072-64731040972c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1435.982038] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 3ce12ea9-6fde-4d6f-9b1c-d8b8a2786094] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1436.150990] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212aa88e-fbf3-411e-b698-889c546d2572 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.163198] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca015d3e-3530-49cd-ad62-1074050e8862 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.197022] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedf0bf5-575e-456b-a5ac-68f97509b372 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.204941] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e22940d-e55c-419e-a9c4-73ca150013f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.221545] env[63371]: DEBUG nova.compute.provider_tree [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1436.307775] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5270ea18-d71f-b9f0-d072-64731040972c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.485753] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: a43fed87-5205-4148-834e-66778a90b7bc] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1436.524389] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1436.561887] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1436.561887] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1436.561887] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1436.563458] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1436.563627] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1436.563822] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1436.564062] env[63371]: DEBUG 
nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1436.564242] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1436.564411] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1436.564571] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1436.564741] env[63371]: DEBUG nova.virt.hardware [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1436.565674] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69de49b-2027-4b17-88c3-765c2e45c9fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.583367] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d607497-5ed6-4b50-8a58-62348541ccfa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.725862] env[63371]: DEBUG nova.scheduler.client.report [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1436.810039] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5270ea18-d71f-b9f0-d072-64731040972c, 'name': SearchDatastore_Task, 'duration_secs': 0.567599} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1436.810039] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cbc0b05-ae8d-4462-a22e-ae0321343663 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1436.816637] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1436.816637] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5287dc04-6a8c-f610-ea3b-a103f5f1c793" [ 1436.816637] env[63371]: _type = "Task" [ 1436.816637] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1436.825927] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5287dc04-6a8c-f610-ea3b-a103f5f1c793, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1436.990293] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: cffe6a79-ad7e-4488-b179-608a03c978aa] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1437.077948] env[63371]: DEBUG nova.compute.manager [req-5b76369a-83f4-4487-8e8d-06e0b5877aeb req-6216e4a0-3e1f-44b2-b9c2-f1554fc492bc service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Received event network-vif-plugged-1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1437.078242] env[63371]: DEBUG oslo_concurrency.lockutils [req-5b76369a-83f4-4487-8e8d-06e0b5877aeb req-6216e4a0-3e1f-44b2-b9c2-f1554fc492bc service nova] Acquiring lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1437.078530] env[63371]: DEBUG oslo_concurrency.lockutils [req-5b76369a-83f4-4487-8e8d-06e0b5877aeb req-6216e4a0-3e1f-44b2-b9c2-f1554fc492bc service nova] Lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.078699] env[63371]: DEBUG oslo_concurrency.lockutils [req-5b76369a-83f4-4487-8e8d-06e0b5877aeb req-6216e4a0-3e1f-44b2-b9c2-f1554fc492bc service nova] Lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.078866] env[63371]: DEBUG nova.compute.manager [req-5b76369a-83f4-4487-8e8d-06e0b5877aeb req-6216e4a0-3e1f-44b2-b9c2-f1554fc492bc service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] No waiting events found dispatching network-vif-plugged-1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 1437.079040] env[63371]: WARNING nova.compute.manager [req-5b76369a-83f4-4487-8e8d-06e0b5877aeb req-6216e4a0-3e1f-44b2-b9c2-f1554fc492bc service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Received unexpected event network-vif-plugged-1d08ea03-4a7c-43bc-9a11-db1f92c6c505 for instance with vm_state building and task_state spawning. [ 1437.220414] env[63371]: DEBUG nova.network.neutron [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Successfully updated port: 1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1437.232228] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.734s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.232845] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1437.235882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.243s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.239025] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.239025] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.750s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1437.240213] env[63371]: INFO nova.compute.claims [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1437.276710] env[63371]: INFO nova.scheduler.client.report [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Deleted allocations for instance 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406 [ 1437.329318] env[63371]: DEBUG 
oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5287dc04-6a8c-f610-ea3b-a103f5f1c793, 'name': SearchDatastore_Task, 'duration_secs': 0.334403} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1437.329599] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1437.330092] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. {{(pid=63371) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1437.330377] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c0c13e5-d7f8-4737-b76b-68116c11da09 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1437.341384] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1437.341384] env[63371]: value = "task-1773823" [ 1437.341384] env[63371]: _type = "Task" [ 1437.341384] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1437.349769] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773823, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.492100] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1437.492394] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances with incomplete migration {{(pid=63371) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1437.721515] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquiring lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1437.721725] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquired lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1437.721987] env[63371]: DEBUG nova.network.neutron [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1437.737717] env[63371]: DEBUG nova.compute.utils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1437.739108] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1437.739569] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1437.788384] env[63371]: DEBUG oslo_concurrency.lockutils [None req-62f5a08c-f842-4f74-8153-7e2c45a3572a tempest-ImagesNegativeTestJSON-879545503 tempest-ImagesNegativeTestJSON-879545503-project-member] Lock "3f79bc3e-4dd4-4b5f-a5ba-a17124e70406" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.938s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1437.817484] env[63371]: DEBUG nova.policy [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d34f2f64ede4652b10ec546783e859c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cd0696c715249779160762b8ecd83e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1437.853171] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773823, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1437.994789] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1438.169906] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Successfully created port: e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1438.246723] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1438.273226] env[63371]: DEBUG nova.network.neutron [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1438.355289] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773823, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.975449} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.355571] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. [ 1438.356413] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331ea51a-7379-4bf7-8825-9524bcb3d0f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.393475] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1438.397598] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efc2873d-2ded-46e3-9e4c-15815571fee1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.419907] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1438.419907] env[63371]: value = "task-1773824" [ 1438.419907] env[63371]: _type = "Task" [ 1438.419907] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1438.440593] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773824, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1438.662197] env[63371]: DEBUG oslo_concurrency.lockutils [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1438.662197] env[63371]: DEBUG oslo_concurrency.lockutils [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1438.664319] env[63371]: DEBUG nova.network.neutron [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Updating instance_info_cache with network_info: [{"id": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "address": "fa:16:3e:ac:de:06", "network": {"id": "50743102-4d46-4fa2-b7b2-9d99ef746757", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-137039111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "941e719c77a84e8d8fe0107968a0f527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d08ea03-4a", "ovs_interfaceid": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1438.922464] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5af7c9c-9fb2-410a-afb2-a7c8eeb27051 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.937323] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773824, 'name': ReconfigVM_Task, 'duration_secs': 0.513661} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1438.938524] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61debdc7-43aa-4a99-96ac-0496bc197608 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.942329] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Reconfigured VM instance instance-00000021 to attach disk [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1438.943630] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93a0549-2794-44df-ba07-fc9216b314a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.996861] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-405087bd-53b3-42af-8f5c-1659d20e6003 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.008286] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4ffe43-d37d-4e0f-b1ee-81de9e1f236d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.018864] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4153e71-6d7f-4351-9524-28b22b707223 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.023311] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1439.023311] env[63371]: value = "task-1773825" [ 1439.023311] env[63371]: _type = "Task" [ 1439.023311] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.036267] env[63371]: DEBUG nova.compute.provider_tree [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1439.041044] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773825, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.168715] env[63371]: DEBUG nova.compute.utils [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1439.170408] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Releasing lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1439.172046] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Instance network_info: |[{"id": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "address": "fa:16:3e:ac:de:06", "network": {"id": "50743102-4d46-4fa2-b7b2-9d99ef746757", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-137039111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "941e719c77a84e8d8fe0107968a0f527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d08ea03-4a", "ovs_interfaceid": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1439.172179] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:de:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ffcecdaa-a7b8-49fc-9371-dbdb7744688e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1d08ea03-4a7c-43bc-9a11-db1f92c6c505', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1439.184029] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Creating folder: Project (941e719c77a84e8d8fe0107968a0f527). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1439.184029] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f98eb8e7-d91c-42bd-9c9b-02e0280c9cf1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.198570] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Created folder: Project (941e719c77a84e8d8fe0107968a0f527) in parent group-v368199. [ 1439.198570] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Creating folder: Instances. Parent ref: group-v368293. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1439.198570] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-faabab78-8e20-4cbd-9e5e-7b12973b7e42 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.211439] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Created folder: Instances in parent group-v368293. [ 1439.211740] env[63371]: DEBUG oslo.service.loopingcall [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1439.213200] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1439.213200] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e57d2ef-4e9b-4aac-ae68-a49e33fe6287 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.234571] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1439.234571] env[63371]: value = "task-1773828" [ 1439.234571] env[63371]: _type = "Task" [ 1439.234571] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.248808] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773828, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.264117] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1439.296706] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1439.297023] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1439.297583] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1439.297810] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1439.298076] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1439.298237] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1439.298451] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1439.298607] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1439.298805] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1439.299144] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1439.299358] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1439.301238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43824c43-bfaa-40b9-8f38-f5dac472f309 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.311732] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9827f8-642e-4273-8392-03caec6fd36b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.482508] env[63371]: DEBUG nova.compute.manager [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Received event network-changed-1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1439.482745] env[63371]: DEBUG nova.compute.manager [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Refreshing instance network info cache due to event network-changed-1d08ea03-4a7c-43bc-9a11-db1f92c6c505. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1439.482957] env[63371]: DEBUG oslo_concurrency.lockutils [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] Acquiring lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.483226] env[63371]: DEBUG oslo_concurrency.lockutils [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] Acquired lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.483406] env[63371]: DEBUG nova.network.neutron [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Refreshing network info cache for port 1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1439.497121] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.498099] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1439.536325] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773825, 'name': ReconfigVM_Task, 'duration_secs': 0.310548} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.536480] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1439.536706] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-614ca51c-e30b-40ff-a633-7c45f7b75ec7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.542967] env[63371]: DEBUG nova.scheduler.client.report [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1439.550801] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1439.550801] env[63371]: value = "task-1773829" [ 1439.550801] env[63371]: _type = "Task" [ 1439.550801] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.561742] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773829, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.672875] env[63371]: DEBUG oslo_concurrency.lockutils [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1439.750233] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773828, 'name': CreateVM_Task, 'duration_secs': 0.494554} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.750432] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1439.751601] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1439.751770] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1439.752171] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1439.752455] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8aae3ee-d2e8-4988-9675-3e8a09c9eb49 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.760422] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1439.760422] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52994082-ae5d-dc32-da93-ee44f4ea8759" [ 1439.760422] env[63371]: _type = "Task" [ 1439.760422] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.771894] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52994082-ae5d-dc32-da93-ee44f4ea8759, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.012040] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1440.012298] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1440.012420] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1440.050544] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.812s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.051086] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1440.056903] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.619s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.061346] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.063456] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.435s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.064984] env[63371]: INFO nova.compute.claims [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1440.087241] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] 
Task: {'id': task-1773829, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.123595] env[63371]: INFO nova.scheduler.client.report [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Deleted allocations for instance 713dfaf5-d11f-4af2-af92-66a596b0ed4a [ 1440.279057] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52994082-ae5d-dc32-da93-ee44f4ea8759, 'name': SearchDatastore_Task, 'duration_secs': 0.013948} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.279915] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Successfully updated port: e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1440.282744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.282744] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1440.282744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.282744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.282883] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1440.282883] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-4002938b-98e8-4454-9fc0-4297b23ed862 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.297564] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1440.298122] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1440.299298] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73af1f01-d624-486f-9a4d-01c5dd4028dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.309021] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1440.309021] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a6aa72-4bec-1651-7cdb-82cb05b1d260" [ 1440.309021] env[63371]: _type = "Task" [ 1440.309021] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.325640] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a6aa72-4bec-1651-7cdb-82cb05b1d260, 'name': SearchDatastore_Task, 'duration_secs': 0.013208} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.329344] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06f593a9-e194-4982-ad9b-397366cdd1a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.344059] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1440.344059] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52046a1a-3c66-6767-b285-952619451de4" [ 1440.344059] env[63371]: _type = "Task" [ 1440.344059] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.356226] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52046a1a-3c66-6767-b285-952619451de4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1440.376874] env[63371]: DEBUG nova.network.neutron [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Updated VIF entry in instance network info cache for port 1d08ea03-4a7c-43bc-9a11-db1f92c6c505. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1440.377247] env[63371]: DEBUG nova.network.neutron [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Updating instance_info_cache with network_info: [{"id": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "address": "fa:16:3e:ac:de:06", "network": {"id": "50743102-4d46-4fa2-b7b2-9d99ef746757", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-137039111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "941e719c77a84e8d8fe0107968a0f527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d08ea03-4a", "ovs_interfaceid": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1440.522270] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Skipping network cache update for instance because it is Building. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1440.522270] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Skipping network cache update for instance because it is Building. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1440.522270] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Skipping network cache update for instance because it is Building. 
{{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 1440.538965] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.538965] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquired lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.538965] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Forcefully refreshing network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1440.538965] env[63371]: DEBUG nova.objects.instance [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lazy-loading 'info_cache' on Instance uuid ca53accc-a15f-4503-87e5-7cbf3e2c0b43 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1440.557639] env[63371]: DEBUG nova.compute.utils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1440.561683] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1440.562179] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1440.572745] env[63371]: DEBUG oslo_vmware.api [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773829, 'name': PowerOnVM_Task, 'duration_secs': 0.59941} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.573388] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1440.576310] env[63371]: DEBUG nova.compute.manager [None req-bbce6232-8fb0-4cd1-a036-2549df6e16b9 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1440.580113] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0fa30e8-1837-465f-b53b-9662e19fe141 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.634021] env[63371]: DEBUG nova.policy [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d34f2f64ede4652b10ec546783e859c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cd0696c715249779160762b8ecd83e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1440.639147] env[63371]: DEBUG oslo_concurrency.lockutils [None req-63796084-8127-4f2d-96a4-7663f66dddea tempest-ServerRescueTestJSONUnderV235-1224768342 tempest-ServerRescueTestJSONUnderV235-1224768342-project-member] Lock "713dfaf5-d11f-4af2-af92-66a596b0ed4a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.269s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1440.784597] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "refresh_cache-b5e259ea-d103-41c6-84b3-748813bb514d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1440.784886] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "refresh_cache-b5e259ea-d103-41c6-84b3-748813bb514d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1440.784931] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1440.806322] env[63371]: DEBUG oslo_concurrency.lockutils [None 
req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1440.806322] env[63371]: DEBUG oslo_concurrency.lockutils [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1440.806322] env[63371]: INFO nova.compute.manager [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Attaching volume fd8f0908-509b-4986-8eae-d6db5f10b561 to /dev/sdb [ 1440.853488] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c237f4b1-d2ba-45e9-98da-e0e4d50d8e6b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.866515] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52046a1a-3c66-6767-b285-952619451de4, 'name': SearchDatastore_Task, 'duration_secs': 0.015461} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1440.869077] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.869395] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e8bd5802-d2ff-4348-92d4-c23277f4eaeb/e8bd5802-d2ff-4348-92d4-c23277f4eaeb.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1440.869804] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16df55a6-6ba9-4555-b70c-2a048c16fa2c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.873123] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21692c40-ba65-4e0b-a2cb-3dec0b3ffede {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.881217] env[63371]: DEBUG oslo_concurrency.lockutils [req-225db845-5077-4402-8329-f6121ce7e02a req-2a4740cd-bb0f-449a-b718-38bd590bb128 service nova] Releasing lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1440.884376] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1440.884376] env[63371]: value = "task-1773830" [ 1440.884376] env[63371]: _type = "Task" [ 1440.884376] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1440.900927] env[63371]: DEBUG nova.virt.block_device [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updating existing volume attachment record: 71bc6b72-7d4c-4e64-9e3b-5d56e88566ea {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1440.903385] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773830, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.065644] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1441.267683] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Successfully created port: d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1441.372133] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1441.396872] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773830, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1441.597298] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1441.602924] env[63371]: DEBUG nova.compute.manager [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Received event network-vif-plugged-e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1441.603157] env[63371]: DEBUG oslo_concurrency.lockutils [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] Acquiring lock "b5e259ea-d103-41c6-84b3-748813bb514d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1441.603358] env[63371]: DEBUG oslo_concurrency.lockutils [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] Lock "b5e259ea-d103-41c6-84b3-748813bb514d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1441.603522] env[63371]: DEBUG oslo_concurrency.lockutils [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] Lock "b5e259ea-d103-41c6-84b3-748813bb514d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1441.603679] env[63371]: DEBUG nova.compute.manager [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] No waiting events found dispatching network-vif-plugged-e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1441.603830] env[63371]: WARNING nova.compute.manager [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Received unexpected event network-vif-plugged-e450db3c-69a8-4e46-817e-eb4d6310fb9d for instance with vm_state building and task_state spawning. [ 1441.603991] env[63371]: DEBUG nova.compute.manager [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Received event network-changed-e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1441.604290] env[63371]: DEBUG nova.compute.manager [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Refreshing instance network info cache due to event network-changed-e450db3c-69a8-4e46-817e-eb4d6310fb9d. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1441.604476] env[63371]: DEBUG oslo_concurrency.lockutils [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] Acquiring lock "refresh_cache-b5e259ea-d103-41c6-84b3-748813bb514d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1441.742438] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ce673f2-34ad-4524-830c-0f456e8635df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.751754] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b90fb52-8e41-43d7-bddf-cc502bcda3c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.789025] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Updating instance_info_cache with network_info: [{"id": "e450db3c-69a8-4e46-817e-eb4d6310fb9d", "address": "fa:16:3e:04:f5:99", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape450db3c-69", "ovs_interfaceid": "e450db3c-69a8-4e46-817e-eb4d6310fb9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1441.789025] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-066686d5-4a65-49d1-8fc4-fd8e4ac79693 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.798089] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090222db-3c29-4b1e-b38d-c1f1414ec69c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.815756] env[63371]: DEBUG nova.compute.provider_tree [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1441.903731] env[63371]: DEBUG oslo_vmware.api [None 
req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773830, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.628335} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1441.903989] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e8bd5802-d2ff-4348-92d4-c23277f4eaeb/e8bd5802-d2ff-4348-92d4-c23277f4eaeb.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1441.904270] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1441.904580] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3fc57df6-4692-4b50-be57-7c489029ab58 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1441.913730] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1441.913730] env[63371]: value = "task-1773834" [ 1441.913730] env[63371]: _type = "Task" [ 1441.913730] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1441.928552] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773834, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.080703] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1442.109771] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1442.109996] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1442.110167] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1442.110354] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1442.110503] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1442.110649] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1442.110862] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1442.111048] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1442.111369] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1442.111454] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1442.111677] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1442.113174] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b482f69-f3e4-43da-be76-bfb1c006a034 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.122431] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fda759-5d1c-485e-8e25-c435c9af5dab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.292107] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "refresh_cache-b5e259ea-d103-41c6-84b3-748813bb514d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1442.292526] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Instance network_info: |[{"id": "e450db3c-69a8-4e46-817e-eb4d6310fb9d", "address": "fa:16:3e:04:f5:99", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape450db3c-69", "ovs_interfaceid": "e450db3c-69a8-4e46-817e-eb4d6310fb9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1442.292846] env[63371]: DEBUG oslo_concurrency.lockutils [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] Acquired lock "refresh_cache-b5e259ea-d103-41c6-84b3-748813bb514d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.293033] env[63371]: DEBUG nova.network.neutron [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Refreshing network info cache for port e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1442.298620] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:f5:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2a75bb6e-6331-4429-b1b9-c968cc22b9c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e450db3c-69a8-4e46-817e-eb4d6310fb9d', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1442.313929] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Creating folder: Project (3cd0696c715249779160762b8ecd83e6). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1442.318632] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6fc283f3-462a-4cfe-af57-da9677687c68 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.322381] env[63371]: DEBUG nova.scheduler.client.report [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1442.341774] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Created folder: Project (3cd0696c715249779160762b8ecd83e6) in parent group-v368199. [ 1442.342032] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Creating folder: Instances. Parent ref: group-v368299. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1442.342328] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70631bdd-3d9e-4d9e-8109-bead086b9caa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.357306] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Created folder: Instances in parent group-v368299. [ 1442.357700] env[63371]: DEBUG oslo.service.loopingcall [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1442.357924] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1442.358365] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aa494e71-05c7-439d-b21b-97e3e2ac558b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.382169] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1442.382169] env[63371]: value = "task-1773837" [ 1442.382169] env[63371]: _type = "Task" [ 1442.382169] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.397550] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773837, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.429827] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773834, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081263} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.430302] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1442.431516] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7bc494c-6ec3-4495-8c21-65332c13df24 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.455771] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] e8bd5802-d2ff-4348-92d4-c23277f4eaeb/e8bd5802-d2ff-4348-92d4-c23277f4eaeb.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1442.456125] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18431aa5-f976-4ac9-bed7-d8c47a7ab538 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1442.480459] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1442.480459] env[63371]: value = "task-1773838" [ 1442.480459] env[63371]: _type = "Task" [ 1442.480459] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1442.491610] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773838, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.574593] env[63371]: INFO nova.compute.manager [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Unrescuing [ 1442.575199] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1442.575199] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquired lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1442.575324] env[63371]: DEBUG nova.network.neutron [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1442.602638] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.829118] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.765s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.829118] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1442.833290] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.178s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.833458] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1442.836354] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.887s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1442.842784] env[63371]: INFO nova.compute.claims [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1442.876335] env[63371]: INFO nova.scheduler.client.report [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Deleted allocations for instance cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1 [ 1442.902788] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773837, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1442.974331] env[63371]: DEBUG nova.network.neutron [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Updated VIF entry in instance network info cache for port e450db3c-69a8-4e46-817e-eb4d6310fb9d. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1442.974712] env[63371]: DEBUG nova.network.neutron [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Updating instance_info_cache with network_info: [{"id": "e450db3c-69a8-4e46-817e-eb4d6310fb9d", "address": "fa:16:3e:04:f5:99", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape450db3c-69", "ovs_interfaceid": "e450db3c-69a8-4e46-817e-eb4d6310fb9d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.996381] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773838, 'name': ReconfigVM_Task, 'duration_secs': 0.367884} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1442.997958] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Reconfigured VM instance instance-00000022 to attach disk [datastore1] e8bd5802-d2ff-4348-92d4-c23277f4eaeb/e8bd5802-d2ff-4348-92d4-c23277f4eaeb.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1442.997958] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c62e2a16-f661-4269-b0b3-516d7a10fb96 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.006799] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1443.006799] env[63371]: value = "task-1773839" [ 1443.006799] env[63371]: _type = "Task" [ 1443.006799] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.021025] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773839, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.110545] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Releasing lock "refresh_cache-ca53accc-a15f-4503-87e5-7cbf3e2c0b43" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.110545] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Updated the network info_cache for instance {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1443.110545] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.110545] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.110545] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.110545] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.110792] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.110792] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.110792] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1443.110792] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1443.346220] env[63371]: DEBUG nova.compute.utils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1443.353999] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1443.356469] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1443.393245] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2277587-594b-41fe-8069-0659ac909481 tempest-ServerShowV247Test-1597920850 tempest-ServerShowV247Test-1597920850-project-member] Lock "cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.404263] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773837, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.481192] env[63371]: DEBUG oslo_concurrency.lockutils [req-658b83a7-fc59-490b-9f53-507d445c5d99 req-0787f4f1-4640-4717-84c2-2e7a0895b026 service nova] Releasing lock "refresh_cache-b5e259ea-d103-41c6-84b3-748813bb514d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1443.522031] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773839, 'name': Rename_Task, 'duration_secs': 0.204079} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.522172] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1443.522374] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abde7d01-595a-4200-afd2-de09c2fbaf62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.533356] env[63371]: DEBUG nova.policy [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7d34f2f64ede4652b10ec546783e859c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cd0696c715249779160762b8ecd83e6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1443.535966] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1443.535966] env[63371]: value = "task-1773841" [ 1443.535966] env[63371]: _type = "Task" [ 1443.535966] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.547265] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773841, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.615331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.807396] env[63371]: DEBUG nova.compute.manager [req-350813a3-09db-4597-b2ce-5b756edcc819 req-38d4c2c3-ec40-4936-b3de-aa8fb27e8240 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Received event network-vif-plugged-d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1443.808632] env[63371]: DEBUG oslo_concurrency.lockutils [req-350813a3-09db-4597-b2ce-5b756edcc819 req-38d4c2c3-ec40-4936-b3de-aa8fb27e8240 service nova] Acquiring lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1443.808632] env[63371]: DEBUG oslo_concurrency.lockutils [req-350813a3-09db-4597-b2ce-5b756edcc819 req-38d4c2c3-ec40-4936-b3de-aa8fb27e8240 service nova] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1443.808632] env[63371]: DEBUG oslo_concurrency.lockutils [req-350813a3-09db-4597-b2ce-5b756edcc819 req-38d4c2c3-ec40-4936-b3de-aa8fb27e8240 service nova] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1443.808632] env[63371]: DEBUG nova.compute.manager [req-350813a3-09db-4597-b2ce-5b756edcc819 req-38d4c2c3-ec40-4936-b3de-aa8fb27e8240 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] No waiting events found dispatching network-vif-plugged-d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1443.808632] env[63371]: WARNING nova.compute.manager [req-350813a3-09db-4597-b2ce-5b756edcc819 req-38d4c2c3-ec40-4936-b3de-aa8fb27e8240 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Received unexpected event network-vif-plugged-d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 for instance with vm_state building and task_state spawning. [ 1443.824371] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fcfe08-d5f4-e03c-b80c-0dfc276adab7/disk-0.vmdk. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1443.826123] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b83a874-2b65-4177-afd5-aeace9bbdbda {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.836322] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fcfe08-d5f4-e03c-b80c-0dfc276adab7/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1443.836322] env[63371]: ERROR oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fcfe08-d5f4-e03c-b80c-0dfc276adab7/disk-0.vmdk due to incomplete transfer. [ 1443.836650] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-39d9819a-7600-48ce-9481-67e9324e832b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.845241] env[63371]: DEBUG oslo_vmware.rw_handles [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52fcfe08-d5f4-e03c-b80c-0dfc276adab7/disk-0.vmdk. 
{{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1443.845519] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Uploaded image 40f4ba8c-3aff-4162-89c5-27a0765d4f79 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1443.847674] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1443.848300] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-089ebcbc-1c7c-4467-89a2-dcdcca6395dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.853965] env[63371]: DEBUG nova.network.neutron [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Updating instance_info_cache with network_info: [{"id": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "address": "fa:16:3e:66:5e:af", "network": {"id": "348d6ae0-6f16-46bf-8dea-bdcdc95316aa", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-1513389745-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "c0f2fde472b14ab9a4d20947ca714191", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3ff3baee-99ce-4b51-ae98-efc6163aaab3", "external-id": "nsx-vlan-transportzone-574", "segmentation_id": 574, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa089f3-fe", "ovs_interfaceid": "9aa089f3-fe69-452e-b5e4-4daac745b9bb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1443.856283] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1443.860320] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1443.860320] env[63371]: value = "task-1773842" [ 1443.860320] env[63371]: _type = "Task" [ 1443.860320] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.876233] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773842, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1443.902108] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773837, 'name': CreateVM_Task, 'duration_secs': 1.100084} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1443.902303] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1443.903197] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1443.903456] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1443.903698] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1443.904304] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4163bac7-af39-4d18-a2fe-c784ff80d942 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.912681] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1443.912681] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c5f9a8-99c8-6f2b-e4ce-dae1135e4a23" [ 1443.912681] env[63371]: _type = "Task" [ 1443.912681] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.921733] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c5f9a8-99c8-6f2b-e4ce-dae1135e4a23, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.049878] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773841, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.147531] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Successfully updated port: d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1444.317928] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Successfully created port: 767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1444.329884] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1444.330172] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1444.355650] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Releasing lock "refresh_cache-b48a8e83-e581-4886-833b-bbce155d40d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1444.357496] env[63371]: DEBUG nova.objects.instance [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lazy-loading 'flavor' on Instance uuid b48a8e83-e581-4886-833b-bbce155d40d9 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1444.376238] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773842, 'name': Destroy_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.425703] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c5f9a8-99c8-6f2b-e4ce-dae1135e4a23, 'name': SearchDatastore_Task, 'duration_secs': 0.014635} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.429535] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1444.429813] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1444.430064] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1444.430302] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.430481] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1444.431396] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6b7fad1-3b44-4738-9a90-81aa1d7669c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.446376] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1444.446376] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1444.448046] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-939063f3-c3a1-4b00-b3ee-6b313b4c2ec0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.454853] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1444.454853] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52eaafd6-ced0-73cc-2356-680d1ecf9244" [ 1444.454853] env[63371]: _type = "Task" [ 1444.454853] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.468570] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52eaafd6-ced0-73cc-2356-680d1ecf9244, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.508610] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a807119-41cd-499e-860a-e164376762c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.518606] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b500b0-f3d3-4cc9-9528-d0f38e2bdec5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.568636] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05b0edd-1576-452f-819a-1475f235224b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.584019] env[63371]: DEBUG oslo_vmware.api [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1773841, 'name': PowerOnVM_Task, 'duration_secs': 0.931959} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.584549] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1444.584656] env[63371]: INFO nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Took 8.06 seconds to spawn the instance on the hypervisor. 
[ 1444.584834] env[63371]: DEBUG nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1444.586193] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f09d57-2e05-4262-803c-7f152f611715 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.591687] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c843b0-b449-4ead-a316-e45f0104b34c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.610275] env[63371]: DEBUG nova.compute.provider_tree [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1444.650014] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "refresh_cache-fb2ddd3e-7adc-4a34-8797-0e98fdf19379" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1444.650211] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "refresh_cache-fb2ddd3e-7adc-4a34-8797-0e98fdf19379" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1444.650373] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1444.863471] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18da847d-27f8-4471-8945-7e0e5b83fce5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.870419] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1444.883962] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773842, 'name': Destroy_Task} progress is 33%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.908235] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1444.908936] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc964641-11f0-4b78-9d3e-ece50123b444 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.918058] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1444.918058] env[63371]: value = "task-1773843" [ 1444.918058] env[63371]: _type = "Task" [ 1444.918058] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.920241] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1444.920503] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1444.920675] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1444.920877] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1444.921040] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1444.921194] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1444.921397] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1444.921553] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1444.921712] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1444.923197] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1444.923197] env[63371]: DEBUG nova.virt.hardware [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1444.923197] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3464eda-8716-44cf-a2c7-72010fe959fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.936797] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773843, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.938139] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee660c4-747c-444b-a25f-8916c0302fb2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.965602] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52eaafd6-ced0-73cc-2356-680d1ecf9244, 'name': SearchDatastore_Task, 'duration_secs': 0.018315} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.966089] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa11a9f4-e693-4e0a-b076-a36ba517666c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1444.972999] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1444.972999] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524950dd-e090-14a7-d465-c0e1f8aedcaf" [ 1444.972999] env[63371]: _type = "Task" [ 1444.972999] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1444.982634] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524950dd-e090-14a7-d465-c0e1f8aedcaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.116202] env[63371]: DEBUG nova.scheduler.client.report [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1445.129744] env[63371]: INFO nova.compute.manager [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Took 50.17 seconds to build instance. [ 1445.197181] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1445.380880] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773842, 'name': Destroy_Task, 'duration_secs': 1.297574} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.381300] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Destroyed the VM [ 1445.381629] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1445.382493] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4cacd534-8fc9-430a-8074-43bb323b28e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.388105] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Updating instance_info_cache with network_info: [{"id": "d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9", "address": "fa:16:3e:0c:22:88", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4ac9418-86", "ovs_interfaceid": "d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1445.394837] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1445.394837] env[63371]: value = "task-1773844" [ 1445.394837] env[63371]: _type = "Task" [ 1445.394837] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.406439] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773844, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.433136] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773843, 'name': PowerOffVM_Task, 'duration_secs': 0.23397} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.433431] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1445.438826] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Reconfiguring VM instance instance-00000021 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1445.439123] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0195a87e-c4de-49d4-89d0-e1657297fb7e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.462732] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1445.462732] env[63371]: value = "task-1773845" [ 1445.462732] env[63371]: _type = "Task" [ 1445.462732] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.464041] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Volume attach. 
Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1445.464041] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368298', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'name': 'volume-fd8f0908-509b-4986-8eae-d6db5f10b561', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cbcdfe1a-86a4-4a12-99b5-44d291d41769', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'serial': 'fd8f0908-509b-4986-8eae-d6db5f10b561'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1445.464881] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0775f3e-d942-442c-86a0-6ca8ec9de675 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.493438] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773845, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.494288] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d60bab6-d595-469b-9ba7-83c1c2cc654b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.507036] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524950dd-e090-14a7-d465-c0e1f8aedcaf, 'name': SearchDatastore_Task, 'duration_secs': 0.024268} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.523409] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.523746] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b5e259ea-d103-41c6-84b3-748813bb514d/b5e259ea-d103-41c6-84b3-748813bb514d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1445.532796] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] volume-fd8f0908-509b-4986-8eae-d6db5f10b561/volume-fd8f0908-509b-4986-8eae-d6db5f10b561.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1445.533426] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ef7679f-c1e2-4660-b8cf-96128ad2e611 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.535728] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-405e5696-6bbc-4eae-a3dd-f1d2681d1b62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.558875] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1445.558875] env[63371]: value = "task-1773846" [ 1445.558875] env[63371]: _type = "Task" [ 1445.558875] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.561036] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Waiting for the task: (returnval){ [ 1445.561036] env[63371]: value = "task-1773847" [ 1445.561036] env[63371]: _type = "Task" [ 1445.561036] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.577516] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773846, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.577516] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773847, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.632817] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c4e80e34-a841-4f01-a929-fc9a2d4b0be2 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.410s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.634108] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.798s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.634578] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1445.638359] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.214s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.638560] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1445.640800] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.597s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1445.643958] env[63371]: INFO nova.compute.claims [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1445.680508] env[63371]: INFO nova.scheduler.client.report [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a 
tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Deleted allocations for instance 852e14a7-2f9f-421c-9804-56c885885c7d [ 1445.896118] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "refresh_cache-fb2ddd3e-7adc-4a34-8797-0e98fdf19379" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1445.896118] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Instance network_info: |[{"id": "d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9", "address": "fa:16:3e:0c:22:88", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4ac9418-86", "ovs_interfaceid": "d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1445.896592] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:22:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2a75bb6e-6331-4429-b1b9-c968cc22b9c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd4ac9418-864a-4adf-ab92-bb5c3dbb8ec9', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1445.906912] env[63371]: DEBUG oslo.service.loopingcall [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1445.908242] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1445.911328] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7edeb84b-4546-44cf-afe8-6d4e5b2283ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.930327] env[63371]: DEBUG nova.compute.manager [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Received event network-changed-d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1445.930531] env[63371]: DEBUG nova.compute.manager [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Refreshing instance network info cache due to event network-changed-d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1445.931306] env[63371]: DEBUG oslo_concurrency.lockutils [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] Acquiring lock "refresh_cache-fb2ddd3e-7adc-4a34-8797-0e98fdf19379" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1445.931306] env[63371]: DEBUG oslo_concurrency.lockutils [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] Acquired lock "refresh_cache-fb2ddd3e-7adc-4a34-8797-0e98fdf19379" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1445.931306] env[63371]: DEBUG nova.network.neutron [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Refreshing network info cache for port d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1445.939372] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773844, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.943021] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1445.943021] env[63371]: value = "task-1773848" [ 1445.943021] env[63371]: _type = "Task" [ 1445.943021] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.952537] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773848, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.974254] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773845, 'name': ReconfigVM_Task, 'duration_secs': 0.262068} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1445.974540] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Reconfigured VM instance instance-00000021 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1445.974716] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1445.975399] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-36931e09-261e-4613-ae42-7c5d664baa97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.983778] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1445.983778] env[63371]: value = "task-1773849" [ 1445.983778] env[63371]: _type = "Task" [ 1445.983778] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.001032] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773849, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.076295] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773846, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.081257] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773847, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.147614] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1446.153891] env[63371]: DEBUG nova.compute.utils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1446.153891] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1446.153891] env[63371]: DEBUG nova.network.neutron [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1446.192340] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4674e3b-a0fe-4886-be7b-31d4afcab11a tempest-DeleteServersAdminTestJSON-1119476989 tempest-DeleteServersAdminTestJSON-1119476989-project-member] Lock "852e14a7-2f9f-421c-9804-56c885885c7d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.944s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.283872] env[63371]: DEBUG nova.policy [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25f28e53648c41d1a147c1aa04f0a708', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fb0da840f6847f19f03a1db8a1c3f4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1446.426778] env[63371]: DEBUG oslo_vmware.api [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773844, 'name': RemoveSnapshot_Task, 'duration_secs': 0.907456} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.428983] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1446.428983] env[63371]: INFO nova.compute.manager [None req-4b46f342-0732-4eb5-8bdc-f7043a6a8d7a tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Took 16.67 seconds to snapshot the instance on the hypervisor. 
[ 1446.461585] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773848, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.495437] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773849, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.579392] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773846, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.719697} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.579829] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b5e259ea-d103-41c6-84b3-748813bb514d/b5e259ea-d103-41c6-84b3-748813bb514d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1446.580324] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1446.580788] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773847, 'name': ReconfigVM_Task, 'duration_secs': 0.630077} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.581131] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-62b3e9ba-95f3-4b28-9a1b-1c329aee767d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.583296] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Reconfigured VM instance instance-0000000d to attach disk [datastore1] volume-fd8f0908-509b-4986-8eae-d6db5f10b561/volume-fd8f0908-509b-4986-8eae-d6db5f10b561.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1446.589065] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-209b7b5e-44a9-41aa-b048-dd5d9398c76a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.619052] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Waiting for the task: (returnval){ [ 1446.619052] env[63371]: value = "task-1773851" [ 1446.619052] env[63371]: _type = "Task" [ 1446.619052] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.621785] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1446.621785] env[63371]: value = "task-1773850" [ 1446.621785] env[63371]: _type = "Task" [ 1446.621785] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.637586] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773850, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.641715] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773851, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.661826] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1446.685167] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.857273] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Successfully updated port: 767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1446.956889] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773848, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.006627] env[63371]: DEBUG oslo_vmware.api [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773849, 'name': PowerOnVM_Task, 'duration_secs': 0.725611} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.007288] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1447.007288] env[63371]: DEBUG nova.compute.manager [None req-e1da562f-b9e0-4fe5-807f-3e369d73fbc8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1447.010872] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb3e24c-f7c8-47d5-9509-875e55f8eec3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.142958] env[63371]: DEBUG oslo_vmware.api [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773851, 'name': ReconfigVM_Task, 'duration_secs': 0.194442} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.146018] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773850, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117761} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.146018] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368298', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'name': 'volume-fd8f0908-509b-4986-8eae-d6db5f10b561', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cbcdfe1a-86a4-4a12-99b5-44d291d41769', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'serial': 'fd8f0908-509b-4986-8eae-d6db5f10b561'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1447.146018] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1447.146838] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c8d8812-72e5-4639-9606-bc41ff030508 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.153714] env[63371]: DEBUG nova.network.neutron [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Updated VIF entry in instance network info cache for port d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1447.154339] env[63371]: DEBUG nova.network.neutron [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Updating instance_info_cache with network_info: [{"id": "d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9", "address": "fa:16:3e:0c:22:88", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd4ac9418-86", "ovs_interfaceid": "d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1447.182446] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] b5e259ea-d103-41c6-84b3-748813bb514d/b5e259ea-d103-41c6-84b3-748813bb514d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1447.185908] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8de1ceea-d3b9-48c3-b541-30ab1d73c673 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.215956] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1447.215956] env[63371]: value = "task-1773852" [ 1447.215956] env[63371]: _type = "Task" [ 1447.215956] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.226706] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773852, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.235022] env[63371]: DEBUG nova.network.neutron [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Successfully created port: e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1447.361991] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "refresh_cache-36b81143-211f-4c77-854b-abe0d3f39ce4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1447.361991] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "refresh_cache-36b81143-211f-4c77-854b-abe0d3f39ce4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1447.361991] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1447.380018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d9b394-0ed1-4c3a-885b-b09dceea50d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.389634] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d02c22-5573-4eed-b4b4-a29b05519d5c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.431557] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76b42c7-da87-4074-8373-5eec031af5dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.443961] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a7fabcb-f7fa-4890-a1db-6e5909d4810e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.458992] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773848, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.467923] env[63371]: DEBUG nova.compute.provider_tree [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1447.658532] env[63371]: DEBUG oslo_concurrency.lockutils [req-f42a4634-2156-4157-87a7-7b7dd6044fcd req-df1bc919-2ab8-4707-9f72-0d85db365a19 service nova] Releasing lock "refresh_cache-fb2ddd3e-7adc-4a34-8797-0e98fdf19379" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1447.687672] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1447.732301] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773852, 'name': ReconfigVM_Task, 'duration_secs': 0.442157} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1447.735283] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Reconfigured VM instance instance-00000023 to attach disk [datastore1] b5e259ea-d103-41c6-84b3-748813bb514d/b5e259ea-d103-41c6-84b3-748813bb514d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1447.736160] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-964b3210-3e95-43a0-80fb-827f9c181cae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.744873] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1447.745135] env[63371]: DEBUG nova.virt.hardware [None 
req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1447.745260] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1447.745434] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1447.745626] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1447.745714] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1447.745956] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1447.746112] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1447.746292] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1447.746456] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1447.746626] env[63371]: DEBUG nova.virt.hardware [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1447.747552] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec38edac-578e-4cd8-938e-f07bd56d3721 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.754308] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1447.754308] env[63371]: value = "task-1773853" [ 1447.754308] env[63371]: _type = "Task" [ 1447.754308] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1447.765395] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-845d4315-c85d-42bc-8b56-f021074d1056 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.774117] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773853, 'name': Rename_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.905014] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1447.962021] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773848, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1447.972555] env[63371]: DEBUG nova.scheduler.client.report [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1448.109459] env[63371]: DEBUG nova.network.neutron [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Updating instance_info_cache with network_info: [{"id": "767b2818-8eb3-4f76-8def-793f9f31a087", "address": "fa:16:3e:99:fa:22", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767b2818-8e", "ovs_interfaceid": "767b2818-8eb3-4f76-8def-793f9f31a087", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1448.238814] env[63371]: DEBUG nova.objects.instance [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lazy-loading 'flavor' on Instance uuid cbcdfe1a-86a4-4a12-99b5-44d291d41769 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1448.248515] env[63371]: DEBUG nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Received event network-vif-plugged-767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1448.248515] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Acquiring lock "36b81143-211f-4c77-854b-abe0d3f39ce4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.248515] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.248515] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.248515] env[63371]: DEBUG nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] No waiting events found dispatching network-vif-plugged-767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1448.248770] env[63371]: WARNING nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Received unexpected event network-vif-plugged-767b2818-8eb3-4f76-8def-793f9f31a087 for instance with vm_state building and task_state spawning. 
[ 1448.248770] env[63371]: DEBUG nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Received event network-changed-1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1448.248770] env[63371]: DEBUG nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Refreshing instance network info cache due to event network-changed-1d08ea03-4a7c-43bc-9a11-db1f92c6c505. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1448.248770] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Acquiring lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.248770] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Acquired lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.248954] env[63371]: DEBUG nova.network.neutron [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Refreshing network info cache for port 1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1448.264516] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773853, 'name': Rename_Task, 'duration_secs': 0.174556} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.265130] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1448.265473] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f69eefce-fff5-4555-999e-406e89607884 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.278744] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1448.278744] env[63371]: value = "task-1773854" [ 1448.278744] env[63371]: _type = "Task" [ 1448.278744] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.289398] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773854, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.460730] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773848, 'name': CreateVM_Task, 'duration_secs': 2.088981} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.461893] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1448.461893] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.463411] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.465526] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1448.465526] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b96898e-05c6-4433-9135-e958113eac53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.472066] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1448.472066] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5222c44c-3750-657e-c552-313f94e4b67f" [ 1448.472066] env[63371]: _type = "Task" [ 1448.472066] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.478254] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.837s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.478760] env[63371]: DEBUG nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1448.486297] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.783s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.487874] env[63371]: INFO nova.compute.claims [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1448.507858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "b48a8e83-e581-4886-833b-bbce155d40d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.507858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "b48a8e83-e581-4886-833b-bbce155d40d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.507858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "b48a8e83-e581-4886-833b-bbce155d40d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1448.507858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "b48a8e83-e581-4886-833b-bbce155d40d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1448.508440] env[63371]: DEBUG 
oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "b48a8e83-e581-4886-833b-bbce155d40d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.508440] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5222c44c-3750-657e-c552-313f94e4b67f, 'name': SearchDatastore_Task, 'duration_secs': 0.013878} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.508574] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.508739] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1448.508950] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1448.509111] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1448.509317] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1448.509858] env[63371]: INFO nova.compute.manager [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Terminating instance [ 1448.514427] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-504125f7-855b-4b8f-b9d5-5edddbbb1369 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.518069] 
env[63371]: DEBUG nova.compute.manager [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1448.519041] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1448.519231] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77edf355-cd23-4a41-bc4e-e28f6863e15a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.528258] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1448.528532] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09972cc8-a1ca-440e-8a90-a9416875c646 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.531335] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1448.532860] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1448.532860] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef51b12a-bba3-4133-8057-449e77801e4d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.540457] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1448.540457] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5284158b-81f6-a2cf-351e-2d67020d4835" [ 1448.540457] env[63371]: _type = "Task" [ 1448.540457] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.540802] env[63371]: DEBUG oslo_vmware.api [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1448.540802] env[63371]: value = "task-1773855" [ 1448.540802] env[63371]: _type = "Task" [ 1448.540802] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.555415] env[63371]: DEBUG oslo_vmware.api [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773855, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.560633] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5284158b-81f6-a2cf-351e-2d67020d4835, 'name': SearchDatastore_Task, 'duration_secs': 0.012408} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1448.561827] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56c8974f-f9f3-43c7-9652-09e5678d76d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.570807] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1448.570807] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5260a9bf-30f0-495d-6297-4213bfe2f638" [ 1448.570807] env[63371]: _type = "Task" [ 1448.570807] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.581539] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5260a9bf-30f0-495d-6297-4213bfe2f638, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.612639] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "refresh_cache-36b81143-211f-4c77-854b-abe0d3f39ce4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1448.613141] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Instance network_info: |[{"id": "767b2818-8eb3-4f76-8def-793f9f31a087", "address": "fa:16:3e:99:fa:22", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767b2818-8e", "ovs_interfaceid": "767b2818-8eb3-4f76-8def-793f9f31a087", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1448.614043] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:fa:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2a75bb6e-6331-4429-b1b9-c968cc22b9c9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '767b2818-8eb3-4f76-8def-793f9f31a087', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1448.621500] env[63371]: DEBUG oslo.service.loopingcall [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1448.621960] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1448.621960] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-90b31ea5-4f6e-4dc8-9f03-e64f467ae279 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.647834] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1448.647834] env[63371]: value = "task-1773856" [ 1448.647834] env[63371]: _type = "Task" [ 1448.647834] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1448.659412] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773856, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.746128] env[63371]: DEBUG oslo_concurrency.lockutils [None req-42b387ae-2a0b-408e-be41-cd3e81250519 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.942s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.793766] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773854, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1448.885361] env[63371]: DEBUG nova.compute.manager [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1448.886379] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee5d03b-cff4-4c76-a813-e14342b7e86f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.000857] env[63371]: DEBUG nova.compute.utils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1449.001174] env[63371]: DEBUG nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Not allocating networking since 'none' was specified. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1449.055275] env[63371]: DEBUG oslo_vmware.api [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773855, 'name': PowerOffVM_Task, 'duration_secs': 0.205691} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.055578] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1449.056463] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1449.056463] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3bbee63a-6a58-4a8e-bea0-023492208035 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.083128] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5260a9bf-30f0-495d-6297-4213bfe2f638, 'name': SearchDatastore_Task, 'duration_secs': 0.011996} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.083188] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1449.084166] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] fb2ddd3e-7adc-4a34-8797-0e98fdf19379/fb2ddd3e-7adc-4a34-8797-0e98fdf19379.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1449.084166] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd7be5b3-8cf3-41a6-9f3b-699a45339673 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.094383] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1449.094383] env[63371]: value = "task-1773858" [ 1449.094383] env[63371]: _type = "Task" [ 1449.094383] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.105887] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773858, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.145681] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1449.145836] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1449.146028] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Deleting the datastore file [datastore1] b48a8e83-e581-4886-833b-bbce155d40d9 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1449.146305] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a266d8c0-9857-438a-84c7-f93059c9ef97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.161168] env[63371]: DEBUG oslo_vmware.api [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1449.161168] env[63371]: value = "task-1773859" [ 1449.161168] env[63371]: _type = "Task" [ 1449.161168] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.165091] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773856, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.228581] env[63371]: DEBUG nova.network.neutron [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Updated VIF entry in instance network info cache for port 1d08ea03-4a7c-43bc-9a11-db1f92c6c505. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1449.228951] env[63371]: DEBUG nova.network.neutron [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Updating instance_info_cache with network_info: [{"id": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "address": "fa:16:3e:ac:de:06", "network": {"id": "50743102-4d46-4fa2-b7b2-9d99ef746757", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-137039111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.207", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "941e719c77a84e8d8fe0107968a0f527", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ffcecdaa-a7b8-49fc-9371-dbdb7744688e", "external-id": "nsx-vlan-transportzone-994", "segmentation_id": 994, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1d08ea03-4a", "ovs_interfaceid": "1d08ea03-4a7c-43bc-9a11-db1f92c6c505", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.246944] env[63371]: DEBUG nova.network.neutron [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Successfully updated port: e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1449.290598] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773854, 'name': PowerOnVM_Task, 'duration_secs': 0.519661} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.290875] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1449.291091] env[63371]: INFO nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Took 10.03 seconds to spawn the instance on the hypervisor. 
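The records above show the task-polling pattern that recurs throughout this log: a vSphere call such as Folder.CreateVM_Task, VirtualDiskManager.CopyVirtualDisk_Task or PowerOnVM_Task returns a Task managed object, wait_for_task blocks on it, and _poll_task emits the "progress is N%" lines until the task completes. A minimal sketch of that pattern with oslo.vmware follows, assuming hypothetical vCenter credentials and pre-resolved folder/resource-pool references; none of these values are taken from this deployment.

from oslo_vmware import api


def create_vm_and_wait(session, vm_folder_ref, config_spec, res_pool_ref):
    # Invoke a vSphere method that returns a Task managed object, e.g. the
    # Folder.CreateVM_Task call seen above; invoke_api proxies the call
    # through the authenticated session.
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder_ref,
                                  config=config_spec, pool=res_pool_ref)
    # wait_for_task polls the task state server-side (producing the repeated
    # "progress is N%" log lines) and returns the task info on success,
    # raising an exception if the task reports an error.
    return session.wait_for_task(task_ref)


# Hypothetical session setup; real values come from the [vmware] section of
# nova.conf and are not part of this log. Constructing the session opens a
# connection to the named vCenter, like the SessionManager.Login call above.
session = api.VMwareAPISession(host='vc.example.test',
                               server_username='admin',
                               server_password='secret',
                               api_retry_count=3,
                               task_poll_interval=0.5)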
[ 1449.291279] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1449.292119] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353ca6ae-96c2-4413-a405-76a74c259bbb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.406032] env[63371]: INFO nova.compute.manager [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] instance snapshotting [ 1449.408742] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1b3fd5-adfc-411e-9af8-637336edb45a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.430859] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b1e3fa-48b1-4dad-8956-256e2fc11819 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.263178] env[63371]: DEBUG nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1450.268421] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Releasing lock "refresh_cache-e8bd5802-d2ff-4348-92d4-c23277f4eaeb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.268860] env[63371]: DEBUG nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Received event network-changed-767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1450.269052] env[63371]: DEBUG nova.compute.manager [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Refreshing instance network info cache due to event network-changed-767b2818-8eb3-4f76-8def-793f9f31a087. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1450.269257] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Acquiring lock "refresh_cache-36b81143-211f-4c77-854b-abe0d3f39ce4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.269443] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Acquired lock "refresh_cache-36b81143-211f-4c77-854b-abe0d3f39ce4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.269606] env[63371]: DEBUG nova.network.neutron [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Refreshing network info cache for port 767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1450.271153] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "refresh_cache-855005ae-3b0e-4ad7-80cf-266075fc6d0f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.271271] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "refresh_cache-855005ae-3b0e-4ad7-80cf-266075fc6d0f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.271419] env[63371]: DEBUG nova.network.neutron [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1450.276704] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1450.290917] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1fe61b5f-40eb-440d-9fa3-5b7c9aa8a59c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.299982] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "6df9af10-0053-4696-920a-10ab2af67ef5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.299982] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 
tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "6df9af10-0053-4696-920a-10ab2af67ef5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.301162] env[63371]: DEBUG nova.compute.manager [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Received event network-vif-plugged-e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1450.301355] env[63371]: DEBUG oslo_concurrency.lockutils [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] Acquiring lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.301554] env[63371]: DEBUG oslo_concurrency.lockutils [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.301719] env[63371]: DEBUG oslo_concurrency.lockutils [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.301893] env[63371]: DEBUG nova.compute.manager [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] No waiting events found dispatching network-vif-plugged-e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1450.302193] env[63371]: WARNING nova.compute.manager [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Received unexpected event network-vif-plugged-e4eb0664-61b0-40ee-a907-faa96a4e1c4d for instance with vm_state building and task_state spawning. [ 1450.302384] env[63371]: DEBUG nova.compute.manager [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Received event network-changed-e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1450.302552] env[63371]: DEBUG nova.compute.manager [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Refreshing instance network info cache due to event network-changed-e4eb0664-61b0-40ee-a907-faa96a4e1c4d. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1450.302717] env[63371]: DEBUG oslo_concurrency.lockutils [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] Acquiring lock "refresh_cache-855005ae-3b0e-4ad7-80cf-266075fc6d0f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.305907] env[63371]: INFO nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Took 52.56 seconds to build instance. [ 1450.316687] env[63371]: DEBUG oslo_vmware.api [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773859, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.685923} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.316940] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773858, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.765926} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.321459] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1450.321673] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1450.321841] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1450.322184] env[63371]: INFO nova.compute.manager [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Took 1.80 seconds to destroy the instance on the hypervisor. [ 1450.322291] env[63371]: DEBUG oslo.service.loopingcall [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1450.322503] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] fb2ddd3e-7adc-4a34-8797-0e98fdf19379/fb2ddd3e-7adc-4a34-8797-0e98fdf19379.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1450.322694] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1450.322988] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1450.322988] env[63371]: value = "task-1773860" [ 1450.322988] env[63371]: _type = "Task" [ 1450.322988] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.324370] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773856, 'name': CreateVM_Task, 'duration_secs': 0.669174} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.326971] env[63371]: DEBUG nova.compute.manager [-] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1450.327106] env[63371]: DEBUG nova.network.neutron [-] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1450.328938] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40062222-d543-4224-b585-c1b9b61de33c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.331359] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1450.335415] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.335595] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.335877] env[63371]: DEBUG 
oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1450.337133] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c921b3f-422a-48dc-ad32-67aaa5b86404 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.349059] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773860, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.349425] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1450.349425] env[63371]: value = "task-1773861" [ 1450.349425] env[63371]: _type = "Task" [ 1450.349425] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.349651] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1450.349651] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52df57c5-c098-15ee-b6bd-b2b44558f6fe" [ 1450.349651] env[63371]: _type = "Task" [ 1450.349651] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.363968] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773861, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.370385] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df57c5-c098-15ee-b6bd-b2b44558f6fe, 'name': SearchDatastore_Task, 'duration_secs': 0.018143} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.372889] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1450.373163] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1450.373423] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1450.373579] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1450.373764] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1450.374429] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f462aaa7-6ee7-42ac-826a-7d19684734a3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.391317] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1450.391572] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1450.397999] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a457687a-d4cf-480a-9a0c-66518f6ff174 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.404029] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1450.404029] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b2f70c-5be0-6202-f734-1b03672ebe29" [ 1450.404029] env[63371]: _type = "Task" [ 1450.404029] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.413135] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b2f70c-5be0-6202-f734-1b03672ebe29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.805425] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1450.806094] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1450.810383] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "b5e259ea-d103-41c6-84b3-748813bb514d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.222s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1450.825669] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a94a0cb-009f-4e08-84f9-a350d92f8e84 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.840230] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e791ed-4b79-4ff1-b371-00ecd562cb5a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.852939] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773860, 'name': 
CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.878373] env[63371]: DEBUG nova.network.neutron [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1450.884958] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe41a9b-5229-4e06-86d8-55c6385110da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.894766] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773861, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.20166} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.895968] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cafa8871-b408-4dd2-8fa2-8bf95e83a08e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.900424] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1450.901395] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20483106-ed22-49da-b45f-114716dd155f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.925808] env[63371]: DEBUG nova.compute.provider_tree [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1450.937748] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] fb2ddd3e-7adc-4a34-8797-0e98fdf19379/fb2ddd3e-7adc-4a34-8797-0e98fdf19379.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1450.943183] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-851db88d-d7e5-4aa6-8fae-fa2f3e0f6088 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.964246] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': 
session[52854284-8312-6a88-0b15-8c5a2a120aab]52b2f70c-5be0-6202-f734-1b03672ebe29, 'name': SearchDatastore_Task, 'duration_secs': 0.038879} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1450.965964] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-774b7d7e-eaac-4236-a227-39f51002c9d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.971269] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1450.971269] env[63371]: value = "task-1773862" [ 1450.971269] env[63371]: _type = "Task" [ 1450.971269] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.972881] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1450.972881] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520f5697-46e3-7d6f-a8ce-4c678159a12e" [ 1450.972881] env[63371]: _type = "Task" [ 1450.972881] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.985115] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520f5697-46e3-7d6f-a8ce-4c678159a12e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.988335] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773862, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.079745] env[63371]: DEBUG nova.network.neutron [-] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.274386] env[63371]: DEBUG nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1451.289853] env[63371]: DEBUG nova.network.neutron [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Updating instance_info_cache with network_info: [{"id": "e4eb0664-61b0-40ee-a907-faa96a4e1c4d", "address": "fa:16:3e:82:53:c9", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4eb0664-61", "ovs_interfaceid": "e4eb0664-61b0-40ee-a907-faa96a4e1c4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.309705] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1451.310385] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1451.310385] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1451.310385] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 
tempest-ServerDiagnosticsV248Test-383719471-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1451.313155] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1451.313155] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1451.313155] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1451.313155] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1451.313155] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1451.313393] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1451.313393] env[63371]: DEBUG nova.virt.hardware [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1451.313393] env[63371]: INFO nova.compute.manager [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Detaching volume fd8f0908-509b-4986-8eae-d6db5f10b561 [ 1451.314898] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1451.317981] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a534a387-119a-44fc-a8c2-56180c21e8b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.332163] env[63371]: DEBUG nova.network.neutron [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Updated VIF entry in instance network info cache for port 767b2818-8eb3-4f76-8def-793f9f31a087. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1451.332693] env[63371]: DEBUG nova.network.neutron [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Updating instance_info_cache with network_info: [{"id": "767b2818-8eb3-4f76-8def-793f9f31a087", "address": "fa:16:3e:99:fa:22", "network": {"id": "3ec0c207-47a1-4985-9d02-f5b91dfed5fb", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1944224465-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd0696c715249779160762b8ecd83e6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2a75bb6e-6331-4429-b1b9-c968cc22b9c9", "external-id": "nsx-vlan-transportzone-244", "segmentation_id": 244, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap767b2818-8e", "ovs_interfaceid": "767b2818-8eb3-4f76-8def-793f9f31a087", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1451.336468] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082b1769-69fa-4634-9154-7e3a3d248888 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.361773] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773860, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.362332] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1451.368694] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Creating folder: Project (d67736854dbf430f8eae90eb3d8e4bb8). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1451.369415] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f93e2063-fa89-4f20-b18c-e03c8f759f39 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.372553] env[63371]: INFO nova.virt.block_device [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Attempting to driver detach volume fd8f0908-509b-4986-8eae-d6db5f10b561 from mountpoint /dev/sdb [ 1451.372891] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Volume detach. Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1451.372982] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368298', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'name': 'volume-fd8f0908-509b-4986-8eae-d6db5f10b561', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cbcdfe1a-86a4-4a12-99b5-44d291d41769', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'serial': 'fd8f0908-509b-4986-8eae-d6db5f10b561'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1451.373839] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de51d5da-ea29-4c30-8c62-732f736aaf16 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.399498] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36723f01-1276-45a8-aaf7-0223faa00e98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.402169] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Created folder: Project (d67736854dbf430f8eae90eb3d8e4bb8) in parent 
group-v368199. [ 1451.402169] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Creating folder: Instances. Parent ref: group-v368304. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1451.402739] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-64366396-191d-4b8b-98f8-e2e43f85e265 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.412322] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980c0cc7-a6d1-49fe-b42f-19038e45280d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.417609] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Created folder: Instances in parent group-v368304. [ 1451.417945] env[63371]: DEBUG oslo.service.loopingcall [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1451.418222] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1451.418473] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3bbcdf46-1ca4-495d-8c87-b3add02a0af7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.449634] env[63371]: DEBUG nova.scheduler.client.report [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1451.457235] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1787f780-70f8-4b27-8dc7-9d2c351716e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.459422] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1451.459422] env[63371]: value = "task-1773865" [ 1451.459422] env[63371]: _type = "Task" [ 1451.459422] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.475451] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] The volume has not been displaced from its original location: [datastore1] volume-fd8f0908-509b-4986-8eae-d6db5f10b561/volume-fd8f0908-509b-4986-8eae-d6db5f10b561.vmdk. No consolidation needed. {{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1451.483286] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Reconfiguring VM instance instance-0000000d to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1451.488313] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4d72599-0ec0-445c-a937-df8d1e870722 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.509091] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773865, 'name': CreateVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.517088] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520f5697-46e3-7d6f-a8ce-4c678159a12e, 'name': SearchDatastore_Task, 'duration_secs': 0.02552} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.521109] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.521436] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 36b81143-211f-4c77-854b-abe0d3f39ce4/36b81143-211f-4c77-854b-abe0d3f39ce4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1451.521798] env[63371]: DEBUG oslo_vmware.api [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Waiting for the task: (returnval){ [ 1451.521798] env[63371]: value = "task-1773866" [ 1451.521798] env[63371]: _type = "Task" [ 1451.521798] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.522087] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773862, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.522343] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97203504-306b-409b-b966-6d21b1f77743 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.535765] env[63371]: DEBUG oslo_vmware.api [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773866, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.536883] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1451.536883] env[63371]: value = "task-1773867" [ 1451.536883] env[63371]: _type = "Task" [ 1451.536883] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.546879] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773867, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.582597] env[63371]: INFO nova.compute.manager [-] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Took 1.26 seconds to deallocate network for instance. 
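
The entries above and below repeatedly show the driver's submit-then-poll cycle against vCenter: a *_Task method (CreateVM_Task, ReconfigVM_Task, CopyVirtualDisk_Task, ...) is invoked through oslo.vmware and returns a task reference immediately, after which the caller blocks in wait_for_task while _poll_task emits the "progress is N%" lines until the task reaches a terminal state. The following is a minimal illustrative sketch of that pattern only; it is not code from this run, and the connection values, argument names and config-spec inputs are assumptions made for illustration.

    # Illustrative sketch of the submit-then-poll pattern recorded in this log
    # (example values are placeholders, not taken from this environment).
    from oslo_vmware import api as vmware_api


    def create_vm(folder_ref, config_spec, respool_ref):
        # VMwareAPISession wraps the vCenter SOAP endpoint. The positional
        # arguments (host, user, password, retry count, poll interval) are
        # assumed here for illustration.
        session = vmware_api.VMwareAPISession(
            'vcenter.example.org', 'user', 'secret', 10, 0.5)

        # A *_Task method returns a task reference right away instead of
        # blocking until the operation finishes on the vCenter side.
        task_ref = session.invoke_api(
            session.vim, 'CreateVM_Task', folder_ref,
            config=config_spec, pool=respool_ref)

        # wait_for_task polls the task state (the "progress is N%" entries)
        # and returns the task info once the task succeeds or raises on error.
        task_info = session.wait_for_task(task_ref)
        return task_info.result  # managed object reference of the new VM
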
[ 1451.792681] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "refresh_cache-855005ae-3b0e-4ad7-80cf-266075fc6d0f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.793154] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Instance network_info: |[{"id": "e4eb0664-61b0-40ee-a907-faa96a4e1c4d", "address": "fa:16:3e:82:53:c9", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4eb0664-61", "ovs_interfaceid": "e4eb0664-61b0-40ee-a907-faa96a4e1c4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1451.793407] env[63371]: DEBUG oslo_concurrency.lockutils [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] Acquired lock "refresh_cache-855005ae-3b0e-4ad7-80cf-266075fc6d0f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.793821] env[63371]: DEBUG nova.network.neutron [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Refreshing network info cache for port e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1451.795980] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:53:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4eb0664-61b0-40ee-a907-faa96a4e1c4d', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1451.806301] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Creating folder: Project (9fb0da840f6847f19f03a1db8a1c3f4f). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1451.811660] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-612c8247-28f0-4c1e-9b84-25503bd273e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.831404] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Created folder: Project (9fb0da840f6847f19f03a1db8a1c3f4f) in parent group-v368199. [ 1451.831807] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Creating folder: Instances. Parent ref: group-v368308. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1451.832392] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57abe06b-0db8-40d6-81c6-2f9ee23ee2ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.843462] env[63371]: DEBUG oslo_concurrency.lockutils [req-17cb0a69-6605-4cd0-b61f-334f2aa79cf1 req-2ffda00f-50f0-4be8-a129-3a2bf9b4b93f service nova] Releasing lock "refresh_cache-36b81143-211f-4c77-854b-abe0d3f39ce4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1451.852533] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Created folder: Instances in parent group-v368308. [ 1451.852533] env[63371]: DEBUG oslo.service.loopingcall [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1451.852533] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1451.853373] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1451.854028] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e5fe1fcc-ec98-4841-a808-77653526be32 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.884039] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773860, 'name': CreateSnapshot_Task, 'duration_secs': 1.322884} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.884039] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1451.884364] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2a9c67-0a93-4c39-8d31-dbc3c248e1d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.893031] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1451.893031] env[63371]: value = "task-1773870" [ 1451.893031] env[63371]: _type = "Task" [ 1451.893031] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1451.908558] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773870, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.958401] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.476s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1451.960039] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1451.962502] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.085s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1451.964053] env[63371]: INFO nova.compute.claims [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1451.984514] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773865, 'name': CreateVM_Task, 'duration_secs': 0.451299} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.984514] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1451.985364] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1451.985594] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1451.985963] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1451.986310] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3045158-deca-4d3c-966c-040e662a0ec9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.991673] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773862, 'name': ReconfigVM_Task, 'duration_secs': 0.649441} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.992423] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Reconfigured VM instance instance-00000024 to attach disk [datastore1] fb2ddd3e-7adc-4a34-8797-0e98fdf19379/fb2ddd3e-7adc-4a34-8797-0e98fdf19379.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1451.993111] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c30209f-b257-4b71-94b4-96dca65031d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1451.996514] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1451.996514] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525841ef-eae5-418f-67fc-259b9c97ffad" [ 1451.996514] env[63371]: _type = "Task" [ 1451.996514] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.003388] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "cfbd0c7c-243e-497a-acb1-ab9323c23574" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.003662] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.004299] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "cfbd0c7c-243e-497a-acb1-ab9323c23574-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.004299] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.004299] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1452.011665] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1452.011665] env[63371]: value = "task-1773871" [ 1452.011665] env[63371]: _type = "Task" [ 1452.011665] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.012105] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525841ef-eae5-418f-67fc-259b9c97ffad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.012735] env[63371]: INFO nova.compute.manager [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Terminating instance [ 1452.019776] env[63371]: DEBUG nova.compute.manager [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1452.019776] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1452.020947] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d2600f-dd1a-4a56-ac75-ff9cfe95ff1e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.032477] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773871, 'name': Rename_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.038349] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1452.042782] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f98643b-82f5-4393-9d3f-51b14395f8fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.050352] env[63371]: DEBUG oslo_vmware.api [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773866, 'name': ReconfigVM_Task, 'duration_secs': 0.36632} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.050352] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Reconfigured VM instance instance-0000000d to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1452.059972] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7582ab9f-7a01-4153-8298-92a8468734f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.071187] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1452.071187] env[63371]: value = "task-1773872" [ 1452.071187] env[63371]: _type = "Task" [ 1452.071187] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.071435] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773867, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.084131] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773872, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.088630] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.088989] env[63371]: DEBUG oslo_vmware.api [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Waiting for the task: (returnval){ [ 1452.088989] env[63371]: value = "task-1773873" [ 1452.088989] env[63371]: _type = "Task" [ 1452.088989] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.102023] env[63371]: DEBUG oslo_vmware.api [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773873, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.317349] env[63371]: DEBUG nova.network.neutron [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Updated VIF entry in instance network info cache for port e4eb0664-61b0-40ee-a907-faa96a4e1c4d. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1452.318535] env[63371]: DEBUG nova.network.neutron [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Updating instance_info_cache with network_info: [{"id": "e4eb0664-61b0-40ee-a907-faa96a4e1c4d", "address": "fa:16:3e:82:53:c9", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4eb0664-61", "ovs_interfaceid": "e4eb0664-61b0-40ee-a907-faa96a4e1c4d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1452.327676] env[63371]: DEBUG nova.compute.manager [req-ab28a0ae-1bb9-4ca4-8358-d9200e975921 req-313fd6ed-cfba-4d65-a52b-6b7a5893cad4 service nova] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Received event network-vif-deleted-9aa089f3-fe69-452e-b5e4-4daac745b9bb {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1452.409661] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1452.409932] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773870, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.410179] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5d4363b0-ad4c-425e-b333-e50efcbaf3b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.419429] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1452.419429] env[63371]: value = "task-1773874" [ 1452.419429] env[63371]: _type = "Task" [ 1452.419429] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.429502] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.473524] env[63371]: DEBUG nova.compute.utils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1452.474600] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1452.474763] env[63371]: DEBUG nova.network.neutron [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1452.516924] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525841ef-eae5-418f-67fc-259b9c97ffad, 'name': SearchDatastore_Task, 'duration_secs': 0.057646} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.521435] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.522109] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1452.524015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.525049] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.525049] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1452.529087] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae8f7396-b02e-4b45-bbcf-92a240dcc370 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.540106] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773871, 'name': Rename_Task, 'duration_secs': 0.31484} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.542712] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1452.543673] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85692c0b-c9d3-40b5-b166-225a47232131 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.551785] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1452.551785] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1452.552867] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-480ea262-1d83-4894-b7d9-e0112964891b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.561642] env[63371]: DEBUG nova.policy [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc60aa7184b4427291f5766e345bc854', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1771acadeced40a6889b7dfb974e7886', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1452.563794] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773867, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.68683} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.565804] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 36b81143-211f-4c77-854b-abe0d3f39ce4/36b81143-211f-4c77-854b-abe0d3f39ce4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1452.566144] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1452.566901] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1452.566901] env[63371]: value = "task-1773875" [ 1452.566901] env[63371]: _type = "Task" [ 1452.566901] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.567122] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3df48c2b-a20f-4e2b-b35f-8c0444ba24ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.574714] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1452.574714] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523a4ec3-ae5c-0064-631d-96a08cd37864" [ 1452.574714] env[63371]: _type = "Task" [ 1452.574714] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.589577] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773875, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.590282] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1452.590282] env[63371]: value = "task-1773876" [ 1452.590282] env[63371]: _type = "Task" [ 1452.590282] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.608124] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773872, 'name': PowerOffVM_Task, 'duration_secs': 0.398223} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.608898] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523a4ec3-ae5c-0064-631d-96a08cd37864, 'name': SearchDatastore_Task, 'duration_secs': 0.021331} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.610187] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1452.610345] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1452.611682] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bfc5807-c5ef-47d3-b41d-b85769745f23 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.613041] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6d5262e-2ab5-4ee4-88a4-37d3dd68dc42 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.622806] env[63371]: DEBUG oslo_vmware.api [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Task: {'id': task-1773873, 'name': ReconfigVM_Task, 'duration_secs': 0.215725} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.628591] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368298', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'name': 'volume-fd8f0908-509b-4986-8eae-d6db5f10b561', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cbcdfe1a-86a4-4a12-99b5-44d291d41769', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd8f0908-509b-4986-8eae-d6db5f10b561', 'serial': 'fd8f0908-509b-4986-8eae-d6db5f10b561'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1452.631095] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773876, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.631524] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1452.631524] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5228166a-0026-b8f5-c66b-bf2c4e135655" [ 1452.631524] env[63371]: _type = "Task" [ 1452.631524] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.645961] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5228166a-0026-b8f5-c66b-bf2c4e135655, 'name': SearchDatastore_Task, 'duration_secs': 0.015674} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.645961] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.646372] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 50d5eac1-0752-4089-948c-b04439df6f6c/50d5eac1-0752-4089-948c-b04439df6f6c.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1452.646853] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-932a02c0-e597-497f-9d0a-2bad813b30a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.657330] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1452.657330] env[63371]: value = "task-1773878" [ 1452.657330] env[63371]: _type = "Task" [ 1452.657330] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.673298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "195de525-1081-4db6-acf3-04a6d3eb142f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1452.676729] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "195de525-1081-4db6-acf3-04a6d3eb142f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1452.676729] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773878, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.719069] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1452.719405] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1452.719705] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Deleting the datastore file [datastore1] cfbd0c7c-243e-497a-acb1-ab9323c23574 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1452.721020] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcb0c80d-f6a8-4296-952a-eba24d542ca9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.730256] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1452.730256] env[63371]: value = "task-1773879" [ 1452.730256] env[63371]: _type = "Task" [ 1452.730256] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.744579] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773879, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.822469] env[63371]: DEBUG oslo_concurrency.lockutils [req-d981b296-01ae-44d8-99be-7a28e1703239 req-750f056e-ecb7-4e07-8f35-f3e2a4a1b799 service nova] Releasing lock "refresh_cache-855005ae-3b0e-4ad7-80cf-266075fc6d0f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1452.907107] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773870, 'name': CreateVM_Task, 'duration_secs': 0.629293} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1452.907107] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1452.907107] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1452.907232] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1452.907762] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1452.908120] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be87eea3-b7ad-45c8-a6b1-dd69a4518b4a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.915837] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1452.915837] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fd5b76-04b8-f295-feaa-8cf8df4fc7e9" [ 1452.915837] env[63371]: _type = "Task" [ 1452.915837] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.928660] env[63371]: DEBUG nova.network.neutron [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Successfully created port: 78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1452.937664] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fd5b76-04b8-f295-feaa-8cf8df4fc7e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.948333] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1452.979774] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1453.087794] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773875, 'name': PowerOnVM_Task} progress is 76%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.106457] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773876, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.125268} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.109578] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1453.110886] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-359befaa-f172-40b1-a9c4-63091c4b73af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.153995] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 36b81143-211f-4c77-854b-abe0d3f39ce4/36b81143-211f-4c77-854b-abe0d3f39ce4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1453.158989] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f05bf0d-008e-4743-b6e8-8abd290d0407 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.200757] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773878, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.209523] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1453.209523] env[63371]: value = "task-1773880" [ 1453.209523] env[63371]: _type = "Task" [ 1453.209523] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.215945] env[63371]: DEBUG nova.objects.instance [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lazy-loading 'flavor' on Instance uuid cbcdfe1a-86a4-4a12-99b5-44d291d41769 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1453.224385] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773880, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.242320] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773879, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.428823] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fd5b76-04b8-f295-feaa-8cf8df4fc7e9, 'name': SearchDatastore_Task, 'duration_secs': 0.036417} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.433054] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1453.433437] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1453.433786] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1453.434043] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1453.434370] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1453.434998] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4dbf7474-5041-4f61-ac62-0d973a311153 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.441175] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.459032] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1453.459435] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1453.463021] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bda0830-d0e7-4e22-80be-2b6af5f6cd92 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.476382] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1453.476382] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52df23e6-d995-7627-95cc-e6f9648c4067" [ 1453.476382] env[63371]: _type = "Task" [ 1453.476382] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.498567] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df23e6-d995-7627-95cc-e6f9648c4067, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.592598] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773875, 'name': PowerOnVM_Task} progress is 76%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.699692] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773878, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.038336} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.700642] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 50d5eac1-0752-4089-948c-b04439df6f6c/50d5eac1-0752-4089-948c-b04439df6f6c.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1453.700642] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1453.700815] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46b2aafb-f061-4b85-9791-ab3c7f9306e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.717185] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1453.717185] env[63371]: value = "task-1773881" [ 1453.717185] env[63371]: _type = "Task" [ 1453.717185] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.725504] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773880, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.733645] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773881, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.748738] env[63371]: DEBUG oslo_vmware.api [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1773879, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.619992} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.749011] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1453.749209] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1453.749385] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1453.749553] env[63371]: INFO nova.compute.manager [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Took 1.73 seconds to destroy the instance on the hypervisor. [ 1453.749797] env[63371]: DEBUG oslo.service.loopingcall [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1453.749986] env[63371]: DEBUG nova.compute.manager [-] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1453.750097] env[63371]: DEBUG nova.network.neutron [-] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1453.757861] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2118ecb-75a7-483a-b371-94118303dbdf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.767126] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1745e5-898c-4e6c-9864-d3fdcdd95b98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.800437] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6e2429-2a0a-47f1-a332-8a10cc136404 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.810400] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f4b7431-dd02-49df-9b47-00eabbca6086 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.828076] env[63371]: DEBUG nova.compute.provider_tree [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1453.934374] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.990662] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df23e6-d995-7627-95cc-e6f9648c4067, 'name': SearchDatastore_Task, 'duration_secs': 0.063338} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.991704] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55e9deab-f223-4229-9896-34d9a37825d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.000780] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1454.003036] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1454.003036] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5294f5f8-4a47-55b1-b253-66dd4af369a3" [ 1454.003036] env[63371]: _type = "Task" [ 1454.003036] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.023009] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5294f5f8-4a47-55b1-b253-66dd4af369a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.046612] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=<?>,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-11T21:16:28Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1454.047485] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1454.047485] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1454.049135] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1454.049379] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1454.049553] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778
tempest-InstanceActionsTestJSON-1503006778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1454.049798] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1454.049977] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1454.050193] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1454.050376] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1454.050610] env[63371]: DEBUG nova.virt.hardware [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1454.051686] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21506b4-2ef8-4dc7-8942-16cd3e50a784 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.061019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4e2c6b-3035-487c-a63d-154fc012c1d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.095329] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773875, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.227874] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773880, 'name': ReconfigVM_Task, 'duration_secs': 0.914051} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.228780] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 36b81143-211f-4c77-854b-abe0d3f39ce4/36b81143-211f-4c77-854b-abe0d3f39ce4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1454.229546] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-632880ea-06c8-4825-b21a-890b300ddf51 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.235636] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773881, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117482} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.236804] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1454.237270] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1532be3-4770-47bb-a626-83b3c49635d9 tempest-VolumesAssistedSnapshotsTest-1531028009 tempest-VolumesAssistedSnapshotsTest-1531028009-project-admin] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" "released" by "nova.compute.manager.ComputeManager.detach_volume.<locals>.do_detach_volume" :: held 3.432s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.242025] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e331674e-31c1-484f-a8eb-ccac503185e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.243986] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1454.243986] env[63371]: value = "task-1773882" [ 1454.243986] env[63371]: _type = "Task" [ 1454.243986] env[63371]: } to complete.
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.264218] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 50d5eac1-0752-4089-948c-b04439df6f6c/50d5eac1-0752-4089-948c-b04439df6f6c.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1454.265971] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04f70dd6-e984-4537-b8e0-df0b0b4a4db6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.285368] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773882, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.293651] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1454.293651] env[63371]: value = "task-1773883" [ 1454.293651] env[63371]: _type = "Task" [ 1454.293651] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.306837] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773883, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.331888] env[63371]: DEBUG nova.scheduler.client.report [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1454.433833] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.515786] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5294f5f8-4a47-55b1-b253-66dd4af369a3, 'name': SearchDatastore_Task, 'duration_secs': 0.020914} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.516105] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1454.516375] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 855005ae-3b0e-4ad7-80cf-266075fc6d0f/855005ae-3b0e-4ad7-80cf-266075fc6d0f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1454.516689] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d68d3396-ac3d-49ba-9c1f-656ed420804d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.527824] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1454.527824] env[63371]: value = "task-1773884" [ 1454.527824] env[63371]: _type = "Task" [ 1454.527824] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.544717] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773884, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.591555] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquiring lock "e00c2e45-b8bc-440b-8b58-a21f127192c7" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1454.591773] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.591911] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquiring lock "e00c2e45-b8bc-440b-8b58-a21f127192c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1454.592100] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.592377] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.597961] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773875, 'name': PowerOnVM_Task, 'duration_secs': 1.548448} completed successfully.
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.598542] env[63371]: INFO nova.compute.manager [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Terminating instance [ 1454.600276] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1454.600509] env[63371]: INFO nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Took 12.52 seconds to spawn the instance on the hypervisor. [ 1454.600691] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1454.605525] env[63371]: DEBUG nova.compute.manager [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1454.607080] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1454.607080] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331507ca-42f5-4dce-bc4c-54ff76a101a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.613742] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa1fc4f6-3177-4159-9906-965a9808a45e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.628669] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1454.628669] env[63371]: value = "task-1773885" [ 1454.628669] env[63371]: _type = "Task" [ 1454.628669] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.756986] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773882, 'name': Rename_Task, 'duration_secs': 0.356163} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.757386] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1454.757676] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f53f4ac8-3cc9-420f-8d27-ff924b03cfc1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1454.766708] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1454.766708] env[63371]: value = "task-1773887" [ 1454.766708] env[63371]: _type = "Task" [ 1454.766708] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1454.780440] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773887, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.812312] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773883, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.842085] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.879s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1454.842446] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1454.845701] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.969s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1454.846049] env[63371]: DEBUG nova.objects.instance [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lazy-loading 'resources' on Instance uuid 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1454.934732] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.041268] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773884, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.142701] env[63371]: INFO nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Took 51.67 seconds to build instance. [ 1455.147970] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773885, 'name': PowerOffVM_Task, 'duration_secs': 0.301756} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.150097] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1455.150097] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Volume detach. 
Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1455.150097] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368225', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'name': 'volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e00c2e45-b8bc-440b-8b58-a21f127192c7', 'attached_at': '', 'detached_at': '', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'serial': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1455.150097] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d14f97b-ee6d-4887-a9b1-ee04230f29aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.177756] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d45058-c151-4f2f-8fc3-876ed9ce581d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.190529] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4a11b3b-cd5a-4824-9f58-905bc4bf0bcf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.219176] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0237dd90-0ef6-4c61-9e29-c737e658e4e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.242806] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] The volume has not been displaced from its original location: [datastore1] volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8/volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8.vmdk. No consolidation needed. 
{{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1455.248380] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Reconfiguring VM instance instance-0000001a to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1455.248843] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec68650d-2746-4fd7-9d8e-629ef5eb9a32 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.273195] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1455.273195] env[63371]: value = "task-1773888" [ 1455.273195] env[63371]: _type = "Task" [ 1455.273195] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.288869] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773888, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.293270] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773887, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.307684] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773883, 'name': ReconfigVM_Task, 'duration_secs': 0.936746} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.311278] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 50d5eac1-0752-4089-948c-b04439df6f6c/50d5eac1-0752-4089-948c-b04439df6f6c.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1455.311278] env[63371]: DEBUG nova.compute.manager [req-d00a7c68-6f12-444c-b12b-fe7e08686e1b req-c920bf60-a17c-450b-9207-d52604dcf722 service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Received event network-vif-plugged-78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1455.311278] env[63371]: DEBUG oslo_concurrency.lockutils [req-d00a7c68-6f12-444c-b12b-fe7e08686e1b req-c920bf60-a17c-450b-9207-d52604dcf722 service nova] Acquiring lock "be37eb1c-8582-4446-afd6-ae11a8cadf95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1455.311686] env[63371]: DEBUG oslo_concurrency.lockutils [req-d00a7c68-6f12-444c-b12b-fe7e08686e1b req-c920bf60-a17c-450b-9207-d52604dcf722 service nova] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1455.311686] env[63371]: DEBUG oslo_concurrency.lockutils [req-d00a7c68-6f12-444c-b12b-fe7e08686e1b req-c920bf60-a17c-450b-9207-d52604dcf722 service nova] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.311897] env[63371]: DEBUG nova.compute.manager [req-d00a7c68-6f12-444c-b12b-fe7e08686e1b req-c920bf60-a17c-450b-9207-d52604dcf722 service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] No waiting events found dispatching network-vif-plugged-78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1455.312121] env[63371]: WARNING nova.compute.manager [req-d00a7c68-6f12-444c-b12b-fe7e08686e1b req-c920bf60-a17c-450b-9207-d52604dcf722 service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Received unexpected event network-vif-plugged-78d7a9b4-2512-4b55-95e3-50aa146658fa for instance with vm_state building and task_state spawning.
[ 1455.312549] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf59f56b-6dd0-4f6c-a1be-bf1f920f2e9e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.324137] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1455.324137] env[63371]: value = "task-1773889" [ 1455.324137] env[63371]: _type = "Task" [ 1455.324137] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.331888] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773889, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.353026] env[63371]: DEBUG nova.compute.utils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1455.357757] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1455.358244] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1455.435469] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.492887] env[63371]: DEBUG nova.policy [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58195dc4ac74493cbe7ed4fbe63bce54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28cc236260a947899c5e09bca25f7360', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1455.545110] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773884, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.838766} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.547862] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 855005ae-3b0e-4ad7-80cf-266075fc6d0f/855005ae-3b0e-4ad7-80cf-266075fc6d0f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1455.548636] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1455.549197] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-af2baa94-702b-43ee-9d33-260183bacd93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.558353] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1455.558353] env[63371]: value = "task-1773890" [ 1455.558353] env[63371]: _type = "Task" [ 1455.558353] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.572617] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773890, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.621748] env[63371]: DEBUG nova.network.neutron [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Successfully updated port: 78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1455.645652] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 95.984s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1455.784384] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773887, 'name': PowerOnVM_Task} progress is 89%.
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.789605] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773888, 'name': ReconfigVM_Task, 'duration_secs': 0.489789} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.792612] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Reconfigured VM instance instance-0000001a to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1455.795640] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c523953f-5114-4ff1-8d3f-36e327d98b85 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.816576] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1455.816576] env[63371]: value = "task-1773891" [ 1455.816576] env[63371]: _type = "Task" [ 1455.816576] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.826021] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773891, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.841123] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773889, 'name': Rename_Task, 'duration_secs': 0.421885} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.841494] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1455.841757] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8bdbcb7e-56f7-4a87-8b1c-7f16381458c1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.852348] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1455.852348] env[63371]: value = "task-1773892" [ 1455.852348] env[63371]: _type = "Task" [ 1455.852348] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1455.859118] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1455.871375] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773892, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1455.941501] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773874, 'name': CloneVM_Task, 'duration_secs': 3.321967} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1455.941501] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Created linked-clone VM from snapshot [ 1455.941501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441320a7-b320-4b4a-bc61-90fdab267dd8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.952011] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Uploading image d6a027d0-1605-4385-9e91-38b4326d06e7 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1455.986995] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1455.986995] env[63371]: value = "vm-368311" [ 1455.986995] env[63371]: _type = "VirtualMachine" [ 1455.986995] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1455.987336] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5e5e3b16-5722-4946-9204-489397ea8335 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.999403] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lease: (returnval){ [ 1455.999403] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b31479-ffd3-a22b-65e4-d1cccf271ed7" [ 1455.999403] env[63371]: _type = "HttpNfcLease" [ 1455.999403] env[63371]: } obtained for exporting VM: (result){ [ 1455.999403] env[63371]: value = "vm-368311" [ 1455.999403] env[63371]: _type = "VirtualMachine" [ 1455.999403] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1455.999722] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the lease: (returnval){ [ 1455.999722] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b31479-ffd3-a22b-65e4-d1cccf271ed7" [ 1455.999722] env[63371]: _type = "HttpNfcLease" [ 1455.999722] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1456.005053] env[63371]: DEBUG nova.network.neutron [-] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1456.008486] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1456.008486] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b31479-ffd3-a22b-65e4-d1cccf271ed7" [ 1456.008486] env[63371]: _type = "HttpNfcLease" [ 1456.008486] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1456.037525] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac614c17-a719-4628-8643-cde3b485ffe2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.046556] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddeda3b9-0e91-4795-89b0-c8da7815a609 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.085025] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9318c52c-98c1-4137-9975-7e8e04bba280 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.096328] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5d2313-6405-42d3-8c0f-53aa2872ab73 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.102079] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773890, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088154} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.102079] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1456.102079] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a65c427-e1f2-408b-8036-e8a98ce4e4bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.113181] env[63371]: DEBUG nova.compute.provider_tree [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1456.128059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1456.128059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 
tempest-InstanceActionsTestJSON-1503006778-project-member] Acquired lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1456.128059] env[63371]: DEBUG nova.network.neutron [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1456.139625] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 855005ae-3b0e-4ad7-80cf-266075fc6d0f/855005ae-3b0e-4ad7-80cf-266075fc6d0f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1456.141893] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04ffa1ec-6b2c-4043-b3b2-a82636af185b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.156272] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1456.165828] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Successfully created port: 59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1456.169288] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1456.169288] env[63371]: value = "task-1773894" [ 1456.169288] env[63371]: _type = "Task" [ 1456.169288] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.181011] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773894, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.281029] env[63371]: DEBUG oslo_vmware.api [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773887, 'name': PowerOnVM_Task, 'duration_secs': 1.301992} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.281401] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1456.281546] env[63371]: INFO nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Took 11.41 seconds to spawn the instance on the hypervisor. [ 1456.281728] env[63371]: DEBUG nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1456.282589] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f383b5b-4f85-49fb-bf07-60987a6758ee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.327012] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773891, 'name': ReconfigVM_Task, 'duration_secs': 0.206} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.327347] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368225', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'name': 'volume-1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'e00c2e45-b8bc-440b-8b58-a21f127192c7', 'attached_at': '', 'detached_at': '', 'volume_id': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8', 'serial': '1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1456.327634] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1456.328462] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e52de6c-c076-42d7-958c-81e08e56d95d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.336594] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] 
Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1456.336838] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a5fbd98-30cb-44c8-ac17-42164d155587 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.371526] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773892, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.505087] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1456.505416] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1456.505517] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Deleting the datastore file [datastore1] e00c2e45-b8bc-440b-8b58-a21f127192c7 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1456.505816] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a224b713-800e-4b0a-88c6-dfbc9a2ba9ad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.510299] env[63371]: INFO nova.compute.manager [-] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Took 2.76 seconds to deallocate network for instance. [ 1456.514020] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1456.514020] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b31479-ffd3-a22b-65e4-d1cccf271ed7" [ 1456.514020] env[63371]: _type = "HttpNfcLease" [ 1456.514020] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1456.518981] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1456.518981] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b31479-ffd3-a22b-65e4-d1cccf271ed7" [ 1456.518981] env[63371]: _type = "HttpNfcLease" [ 1456.518981] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1456.518981] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for the task: (returnval){ [ 1456.518981] env[63371]: value = "task-1773896" [ 1456.518981] env[63371]: _type = "Task" [ 1456.518981] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.520104] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e70c977-9101-4f5c-a277-5e529b08c0b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.532133] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773896, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.535173] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b8c048-9182-1a2e-6ee5-cf1c575dcf3e/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1456.535395] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b8c048-9182-1a2e-6ee5-cf1c575dcf3e/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1456.638857] env[63371]: ERROR nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [req-3e64947c-9d5b-4ca7-8e2e-9b82f9ffcabf] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3e64947c-9d5b-4ca7-8e2e-9b82f9ffcabf"}]} [ 1456.648211] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ee64bd4c-72c8-4fd9-b0f7-9021c705b398 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.666472] env[63371]: DEBUG nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1456.683680] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773894, 'name': ReconfigVM_Task, 'duration_secs': 0.371285} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.687620] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 855005ae-3b0e-4ad7-80cf-266075fc6d0f/855005ae-3b0e-4ad7-80cf-266075fc6d0f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1456.689668] env[63371]: DEBUG nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1456.689993] env[63371]: DEBUG nova.compute.provider_tree [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1456.694235] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 
tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1456.694235] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-81ec9fdd-9a41-4488-857f-3de55a012a76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.702876] env[63371]: DEBUG nova.network.neutron [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1456.708117] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1456.708117] env[63371]: value = "task-1773897" [ 1456.708117] env[63371]: _type = "Task" [ 1456.708117] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1456.708663] env[63371]: DEBUG nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1456.722208] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773897, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1456.752826] env[63371]: DEBUG nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1456.806823] env[63371]: INFO nova.compute.manager [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Took 49.21 seconds to build instance. [ 1456.872248] env[63371]: DEBUG oslo_vmware.api [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773892, 'name': PowerOnVM_Task, 'duration_secs': 0.882056} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1456.873279] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1456.873279] env[63371]: INFO nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Took 5.60 seconds to spawn the instance on the hypervisor. [ 1456.873279] env[63371]: DEBUG nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1456.874887] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1456.877317] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c6d6db-5c98-422c-b564-287b6fe54777 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.914915] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1456.915164] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1456.915286] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1456.915465] env[63371]: DEBUG nova.virt.hardware [None 
req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1456.915609] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1456.915786] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1456.915948] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1456.916117] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1456.916283] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1456.916445] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1456.916616] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1456.917931] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab03d0b-40e2-4ce1-9714-37e9553cf524 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.935515] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41683115-cf46-417e-a2eb-723d6e40a54a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.002738] env[63371]: DEBUG nova.network.neutron [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Updating instance_info_cache with network_info: [{"id": 
"78d7a9b4-2512-4b55-95e3-50aa146658fa", "address": "fa:16:3e:b4:74:9c", "network": {"id": "718e3616-e606-482f-90b3-aaac39f38b39", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-354664269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1771acadeced40a6889b7dfb974e7886", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d7a9b4-25", "ovs_interfaceid": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1457.020872] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1457.036331] env[63371]: DEBUG oslo_vmware.api [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Task: {'id': task-1773896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129032} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.039364] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1457.039770] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1457.040053] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1457.040217] env[63371]: INFO nova.compute.manager [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Took 2.43 seconds to destroy the instance on the hypervisor. 
[ 1457.041108] env[63371]: DEBUG oslo.service.loopingcall [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1457.043298] env[63371]: DEBUG nova.compute.manager [-] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1457.043298] env[63371]: DEBUG nova.network.neutron [-] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1457.227095] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773897, 'name': Rename_Task, 'duration_secs': 0.176755} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.227240] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1457.227632] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ee94aa2-5a2b-4d70-aea9-3e5582f416d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.236510] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1457.236510] env[63371]: value = "task-1773898" [ 1457.236510] env[63371]: _type = "Task" [ 1457.236510] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.246822] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773898, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.316531] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c1bace46-9bd0-4b9e-b262-39a00c5d1ea4 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.616s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1457.415553] env[63371]: INFO nova.compute.manager [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Took 36.39 seconds to build instance. 
[ 1457.421744] env[63371]: DEBUG nova.compute.manager [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Received event network-vif-deleted-1846a8cd-46dc-4187-af60-d4e4eee750dc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1457.421885] env[63371]: DEBUG nova.compute.manager [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Received event network-changed-78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1457.422595] env[63371]: DEBUG nova.compute.manager [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Refreshing instance network info cache due to event network-changed-78d7a9b4-2512-4b55-95e3-50aa146658fa. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1457.422986] env[63371]: DEBUG oslo_concurrency.lockutils [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] Acquiring lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1457.442944] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40720b46-c4dc-421e-a014-4619ad88f3d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.453519] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd42d736-9b76-481f-806d-1b8302aeafd1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.487831] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33182f54-745f-4c66-800b-90b6e3841b91 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.496914] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e4a5c3-9e27-49b1-9e47-cea71e61a7cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.505470] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Releasing lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1457.505470] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Instance network_info: |[{"id": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "address": "fa:16:3e:b4:74:9c", "network": {"id": "718e3616-e606-482f-90b3-aaac39f38b39", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-354664269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1771acadeced40a6889b7dfb974e7886", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d7a9b4-25", "ovs_interfaceid": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1457.513430] env[63371]: DEBUG oslo_concurrency.lockutils [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] Acquired lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1457.513723] env[63371]: DEBUG nova.network.neutron [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Refreshing network info cache for port 78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1457.515301] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b4:74:9c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '257e5ea7-8b80-4301-9900-a754f1fe2031', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '78d7a9b4-2512-4b55-95e3-50aa146658fa', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1457.523719] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Creating folder: Project (1771acadeced40a6889b7dfb974e7886). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1457.524401] env[63371]: DEBUG nova.compute.provider_tree [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1457.528672] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f54ebaf-0520-4281-87b1-238980664678 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.542651] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Created folder: Project (1771acadeced40a6889b7dfb974e7886) in parent group-v368199. [ 1457.542889] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Creating folder: Instances. Parent ref: group-v368312. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1457.543159] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac4b3495-7687-481b-aecf-c57563f23e49 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.555289] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Created folder: Instances in parent group-v368312. [ 1457.555537] env[63371]: DEBUG oslo.service.loopingcall [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1457.556095] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1457.556329] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a83e5ed5-cdaa-41f2-96ad-2c30067cb0b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.585031] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1457.585031] env[63371]: value = "task-1773901" [ 1457.585031] env[63371]: _type = "Task" [ 1457.585031] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1457.594436] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773901, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1457.751023] env[63371]: DEBUG oslo_vmware.api [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773898, 'name': PowerOnVM_Task, 'duration_secs': 0.500702} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1457.751023] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1457.751023] env[63371]: INFO nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Took 10.06 seconds to spawn the instance on the hypervisor. [ 1457.751023] env[63371]: DEBUG nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1457.751023] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-302a7a4c-5431-4a61-b787-05b31d476c8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1457.821331] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1457.927143] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b4319b2-0461-433f-a191-31163b51c59f tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "50d5eac1-0752-4089-948c-b04439df6f6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.499s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.032983] env[63371]: DEBUG nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1458.100803] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773901, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.281839] env[63371]: INFO nova.compute.manager [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Took 47.35 seconds to build instance. [ 1458.352753] env[63371]: DEBUG nova.compute.manager [None req-8f5b2cac-c04f-45bb-bb67-b29ebdf578e1 tempest-ServerDiagnosticsV248Test-122985939 tempest-ServerDiagnosticsV248Test-122985939-project-admin] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1458.354743] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.357985] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6883fbe5-ea30-4b44-af0f-a3b0e1710a17 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.364272] env[63371]: DEBUG nova.network.neutron [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Updated VIF entry in instance network info cache for port 78d7a9b4-2512-4b55-95e3-50aa146658fa. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1458.364858] env[63371]: DEBUG nova.network.neutron [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Updating instance_info_cache with network_info: [{"id": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "address": "fa:16:3e:b4:74:9c", "network": {"id": "718e3616-e606-482f-90b3-aaac39f38b39", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-354664269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1771acadeced40a6889b7dfb974e7886", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d7a9b4-25", "ovs_interfaceid": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1458.373451] env[63371]: INFO nova.compute.manager [None req-8f5b2cac-c04f-45bb-bb67-b29ebdf578e1 tempest-ServerDiagnosticsV248Test-122985939 tempest-ServerDiagnosticsV248Test-122985939-project-admin] [instance: 
50d5eac1-0752-4089-948c-b04439df6f6c] Retrieving diagnostics [ 1458.374866] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af761f53-225d-4c51-9649-9810ce946e87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.424374] env[63371]: DEBUG nova.network.neutron [-] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1458.433647] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1458.488991] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Successfully updated port: 59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1458.538741] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.693s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.541319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.499s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.541642] env[63371]: DEBUG nova.objects.instance [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lazy-loading 'resources' on Instance uuid ca53accc-a15f-4503-87e5-7cbf3e2c0b43 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1458.573289] env[63371]: INFO nova.scheduler.client.report [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Deleted allocations for instance 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6 [ 1458.597954] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773901, 'name': CreateVM_Task, 'duration_secs': 0.55993} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1458.598171] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1458.598906] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.599345] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.599595] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1458.600010] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe7643a2-fc21-4d05-8b28-e2e9e13320fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.619181] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1458.619181] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5283ae59-0d87-5e56-2c98-b0a22aeabffe" [ 1458.619181] env[63371]: _type = "Task" [ 1458.619181] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.629151] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5283ae59-0d87-5e56-2c98-b0a22aeabffe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.727141] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.727461] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.727688] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.727895] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1458.728128] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.731563] env[63371]: INFO nova.compute.manager [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Terminating instance [ 1458.733960] env[63371]: DEBUG nova.compute.manager [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1458.734178] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1458.735611] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c3b962-84e7-46bd-b7b0-ece472273cab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.745809] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1458.746177] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e3ea91f-6569-440b-9195-60157b712712 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1458.755181] env[63371]: DEBUG oslo_vmware.api [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1458.755181] env[63371]: value = "task-1773902" [ 1458.755181] env[63371]: _type = "Task" [ 1458.755181] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1458.765912] env[63371]: DEBUG oslo_vmware.api [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773902, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1458.787063] env[63371]: DEBUG oslo_concurrency.lockutils [None req-06243931-b221-4bb0-a5e8-12ceb919e850 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 97.614s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1458.868950] env[63371]: DEBUG oslo_concurrency.lockutils [req-0f210d44-29ee-4a4e-b653-dcae8b356661 req-d0028372-acbb-4f56-b43b-b09ddcf80c2a service nova] Releasing lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1458.931713] env[63371]: INFO nova.compute.manager [-] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Took 1.89 seconds to deallocate network for instance. 
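The task entries in this part of the trace all follow the same poll-until-done pattern: a vCenter task is started (PowerOnVM_Task, PowerOffVM_Task, CreateVM_Task, SearchDatastore_Task, DeleteDatastoreFile_Task), the driver waits for it, intermediate "progress is N%" lines are logged, and a final "completed successfully" line reports duration_secs. The minimal Python sketch below illustrates only that generic loop; it is not the oslo.vmware implementation, and fetch_task_info, its return keys, and the state names are hypothetical stand-ins introduced for the example.

import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""


def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300.0):
    """Poll a task until it reaches a terminal state.

    fetch_task_info(task_id) is assumed (for this sketch only) to return a dict
    with 'state' in {'queued', 'running', 'success', 'error'} and an integer
    'progress' (0-100), mirroring the progress/duration fields seen in the log.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)
        if info["state"] == "success":
            return info  # caller can read e.g. info.get("duration_secs")
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        # Corresponds to the repeated "... progress is N%" DEBUG lines above.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

As a trivial usage illustration, wait_for_task(lambda t: {"state": "success", "duration_secs": 0.3}, "task-1773902") returns immediately; a real caller would pass a function that queries the task's current state from the API.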
[ 1458.966518] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1458.996255] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "refresh_cache-64fc862c-a755-4cac-997b-7a8328638269" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1458.996255] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "refresh_cache-64fc862c-a755-4cac-997b-7a8328638269" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1458.996255] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1459.081584] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17b2cb22-72c3-43ad-81ed-b606179b0133 tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.287s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.135351] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5283ae59-0d87-5e56-2c98-b0a22aeabffe, 'name': SearchDatastore_Task, 'duration_secs': 0.01413} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.135970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.135970] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1459.136151] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.136299] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1459.136475] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1459.136747] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-382bca50-85cf-4432-bdd8-fb935337f9f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.147988] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1459.148050] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1459.149041] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8a1549d-5a21-4dec-a0ed-92c67a5b10a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.158946] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1459.158946] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ccdabe-4b0c-192b-8e0d-5757bc772ca4" [ 1459.158946] env[63371]: _type = "Task" [ 1459.158946] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.168980] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ccdabe-4b0c-192b-8e0d-5757bc772ca4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.267558] env[63371]: DEBUG oslo_vmware.api [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773902, 'name': PowerOffVM_Task, 'duration_secs': 0.270797} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.267928] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1459.268148] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1459.268450] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58e84cfa-c002-435f-aadb-15880c9bb760 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.295008] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1459.364917] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1459.365190] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1459.365398] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Deleting the datastore file [datastore1] cbcdfe1a-86a4-4a12-99b5-44d291d41769 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1459.368464] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca6003d2-b0c1-4e57-a8b4-5609dcd37989 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.379291] env[63371]: DEBUG oslo_vmware.api [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for the task: (returnval){ [ 1459.379291] env[63371]: value = "task-1773904" [ 1459.379291] env[63371]: _type = "Task" [ 1459.379291] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.400768] env[63371]: DEBUG oslo_vmware.api [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773904, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.502892] env[63371]: INFO nova.compute.manager [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Took 0.57 seconds to detach 1 volumes for instance. [ 1459.505340] env[63371]: DEBUG nova.compute.manager [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Deleting volume: 1df79e64-d8fc-4b30-b6ec-f4af9b5b9bc8 {{(pid=63371) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1459.528411] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1459.673543] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ccdabe-4b0c-192b-8e0d-5757bc772ca4, 'name': SearchDatastore_Task, 'duration_secs': 0.015358} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.678514] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "b5e259ea-d103-41c6-84b3-748813bb514d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.679146] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "b5e259ea-d103-41c6-84b3-748813bb514d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.679360] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "b5e259ea-d103-41c6-84b3-748813bb514d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.679454] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "b5e259ea-d103-41c6-84b3-748813bb514d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.679687] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "b5e259ea-d103-41c6-84b3-748813bb514d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.682414] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-858c5561-eed0-49ff-8fb7-02751eca0909 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.684862] env[63371]: INFO nova.compute.manager [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Terminating instance [ 1459.688852] env[63371]: DEBUG nova.compute.manager [None 
req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1459.689899] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1459.690367] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ae3a2c-a9b8-4486-9fe5-03808248c9af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.699126] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1459.699126] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528e3763-75dc-ad00-3aed-b5dc551c04dd" [ 1459.699126] env[63371]: _type = "Task" [ 1459.699126] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.701713] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5baf98cc-9fbd-4d7f-8454-b1ab402bc350 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.713600] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1459.716238] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c455da5-2a6e-4937-b59e-aa1569bb4267 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.722062] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c07d27-c61a-491a-be29-f80acaea18c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.725813] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528e3763-75dc-ad00-3aed-b5dc551c04dd, 'name': SearchDatastore_Task, 'duration_secs': 0.014844} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.727511] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1459.727750] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] be37eb1c-8582-4446-afd6-ae11a8cadf95/be37eb1c-8582-4446-afd6-ae11a8cadf95.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1459.729269] env[63371]: DEBUG nova.compute.manager [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Received event network-vif-deleted-5279ae43-ba7a-4b25-b00c-7ffe5fef1ce0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1459.729465] env[63371]: DEBUG nova.compute.manager [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Received event network-vif-plugged-59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1459.729645] env[63371]: DEBUG oslo_concurrency.lockutils [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] Acquiring lock "64fc862c-a755-4cac-997b-7a8328638269-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.729840] env[63371]: DEBUG oslo_concurrency.lockutils [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] Lock "64fc862c-a755-4cac-997b-7a8328638269-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.729996] env[63371]: DEBUG oslo_concurrency.lockutils [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] Lock "64fc862c-a755-4cac-997b-7a8328638269-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.730257] env[63371]: DEBUG nova.compute.manager [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] No waiting events found dispatching network-vif-plugged-59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1459.730466] env[63371]: WARNING nova.compute.manager [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 
64fc862c-a755-4cac-997b-7a8328638269] Received unexpected event network-vif-plugged-59bb4dc3-13e6-4180-bec1-3a41954f8d62 for instance with vm_state building and task_state spawning. [ 1459.730660] env[63371]: DEBUG nova.compute.manager [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Received event network-changed-59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1459.731087] env[63371]: DEBUG nova.compute.manager [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Refreshing instance network info cache due to event network-changed-59bb4dc3-13e6-4180-bec1-3a41954f8d62. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1459.731357] env[63371]: DEBUG oslo_concurrency.lockutils [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] Acquiring lock "refresh_cache-64fc862c-a755-4cac-997b-7a8328638269" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1459.732768] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-922964d0-c675-41bc-958c-c2c1f024f8df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.764851] env[63371]: DEBUG oslo_vmware.api [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1459.764851] env[63371]: value = "task-1773906" [ 1459.764851] env[63371]: _type = "Task" [ 1459.764851] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.766122] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Updating instance_info_cache with network_info: [{"id": "59bb4dc3-13e6-4180-bec1-3a41954f8d62", "address": "fa:16:3e:68:06:08", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bb4dc3-13", "ovs_interfaceid": "59bb4dc3-13e6-4180-bec1-3a41954f8d62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1459.768316] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87e83d8f-1177-426a-b739-294be4459f9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.777214] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1459.777214] env[63371]: value = "task-1773907" [ 1459.777214] env[63371]: _type = "Task" [ 1459.777214] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.789248] env[63371]: DEBUG oslo_vmware.api [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773906, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.793100] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63ae73e-3f15-4a74-bf65-1182e37398f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.804760] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773907, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.805308] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "201a2d1e-9e2c-4c07-92be-200408874ad4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.805567] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.805810] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "201a2d1e-9e2c-4c07-92be-200408874ad4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.806066] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.806245] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.819525] env[63371]: DEBUG nova.compute.provider_tree [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1459.822049] env[63371]: INFO nova.compute.manager [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Terminating instance [ 1459.823441] env[63371]: DEBUG nova.compute.manager [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1459.823551] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1459.826216] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d167fd0-871a-4d91-b578-d2308cb49a57 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.834114] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1459.834466] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0e520b9-3ee7-4ef4-a6e3-c5c3185b0d05 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.843604] env[63371]: DEBUG oslo_vmware.api [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1459.843604] env[63371]: value = "task-1773908" [ 1459.843604] env[63371]: _type = "Task" [ 1459.843604] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.844758] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.855594] env[63371]: DEBUG oslo_vmware.api [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773908, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1459.889841] env[63371]: DEBUG oslo_vmware.api [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Task: {'id': task-1773904, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.324073} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1459.890138] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1459.890505] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1459.890720] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1459.890905] env[63371]: INFO nova.compute.manager [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1459.891185] env[63371]: DEBUG oslo.service.loopingcall [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1459.891575] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.891796] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.891986] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1459.892184] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1459.892783] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1459.893915] env[63371]: DEBUG nova.compute.manager [-] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1459.894023] env[63371]: DEBUG nova.network.neutron [-] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1459.895950] env[63371]: INFO nova.compute.manager [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Terminating instance [ 1459.901022] env[63371]: DEBUG nova.compute.manager [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1459.901383] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1459.902061] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68617bc9-2ea0-411c-97fa-92866dc20a85 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.912441] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1459.912805] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5f2e6b5d-6d22-4095-8869-387dc2a972ad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1459.921896] env[63371]: DEBUG oslo_vmware.api [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1459.921896] env[63371]: value = "task-1773909" [ 1459.921896] env[63371]: _type = "Task" [ 1459.921896] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1459.934408] env[63371]: DEBUG oslo_vmware.api [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773909, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.058962] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1460.272504] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "refresh_cache-64fc862c-a755-4cac-997b-7a8328638269" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1460.272848] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Instance network_info: |[{"id": "59bb4dc3-13e6-4180-bec1-3a41954f8d62", "address": "fa:16:3e:68:06:08", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bb4dc3-13", "ovs_interfaceid": "59bb4dc3-13e6-4180-bec1-3a41954f8d62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1460.273433] env[63371]: DEBUG oslo_concurrency.lockutils [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] Acquired lock "refresh_cache-64fc862c-a755-4cac-997b-7a8328638269" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1460.273618] env[63371]: DEBUG nova.network.neutron [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Refreshing network info cache for port 59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1460.275368] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:68:06:08', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '842f738f-eaa4-4444-a9bf-90d2b533184c', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59bb4dc3-13e6-4180-bec1-3a41954f8d62', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1460.284504] env[63371]: DEBUG oslo.service.loopingcall [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1460.286499] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1460.290368] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31fa72e5-7eef-47ce-be4c-d1b356186d5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.314452] env[63371]: DEBUG oslo_vmware.api [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773906, 'name': PowerOffVM_Task, 'duration_secs': 0.478686} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.316100] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1460.316375] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1460.316684] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-538ba89e-01cd-4c0e-a155-39c036198912 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.326297] env[63371]: DEBUG nova.scheduler.client.report [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1460.328885] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773907, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.333155] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1460.333155] env[63371]: value = "task-1773910" [ 1460.333155] env[63371]: _type = "Task" [ 1460.333155] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.348830] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773910, 'name': CreateVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.360540] env[63371]: DEBUG oslo_vmware.api [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773908, 'name': PowerOffVM_Task, 'duration_secs': 0.373073} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.360882] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1460.361092] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1460.361401] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2fb17282-a705-4a91-aab8-a3212bfb9c6c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.441038] env[63371]: DEBUG oslo_vmware.api [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773909, 'name': PowerOffVM_Task, 'duration_secs': 0.300417} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.441038] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1460.441038] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1460.441038] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1460.441038] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1460.442060] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleting the datastore file [datastore1] b5e259ea-d103-41c6-84b3-748813bb514d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1460.442060] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9b0b689-82ac-4735-bec8-6c858b86c5be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.442961] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0e08a0b-ce4c-4912-99a8-6f8f29a334e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.454800] env[63371]: DEBUG oslo_vmware.api [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1460.454800] env[63371]: value = "task-1773913" [ 1460.454800] env[63371]: _type = "Task" [ 1460.454800] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.471666] env[63371]: DEBUG oslo_vmware.api [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773913, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.477179] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1460.477179] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1460.477508] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Deleting the datastore file [datastore1] 201a2d1e-9e2c-4c07-92be-200408874ad4 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1460.478046] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d9929fa-ba7f-4aec-808e-2b878db7284d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.491793] env[63371]: DEBUG oslo_vmware.api [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for the task: (returnval){ [ 1460.491793] env[63371]: value = "task-1773915" [ 1460.491793] env[63371]: _type = "Task" [ 1460.491793] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.502999] env[63371]: DEBUG oslo_vmware.api [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773915, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.729049] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1460.729670] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1460.729670] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleting the datastore file [datastore1] 855005ae-3b0e-4ad7-80cf-266075fc6d0f {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1460.731320] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e4390fc-5085-4dcf-9a19-9f993f410d36 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.738154] env[63371]: DEBUG oslo_vmware.api [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1460.738154] env[63371]: value = "task-1773916" [ 1460.738154] env[63371]: _type = "Task" [ 1460.738154] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.749565] env[63371]: DEBUG oslo_vmware.api [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773916, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.804313] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773907, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.713087} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.804313] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] be37eb1c-8582-4446-afd6-ae11a8cadf95/be37eb1c-8582-4446-afd6-ae11a8cadf95.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1460.804313] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1460.804313] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-da8862aa-a744-4e56-bc56-1c949e2222e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1460.813645] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1460.813645] env[63371]: value = "task-1773917" [ 1460.813645] env[63371]: _type = "Task" [ 1460.813645] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1460.825489] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773917, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.831015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.289s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1460.833761] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.259s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1460.835113] env[63371]: INFO nova.compute.claims [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1460.856618] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773910, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1460.867660] env[63371]: INFO nova.scheduler.client.report [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Deleted allocations for instance ca53accc-a15f-4503-87e5-7cbf3e2c0b43 [ 1460.967634] env[63371]: DEBUG oslo_vmware.api [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1773913, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.35978} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1460.968126] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1460.968435] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1460.968834] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1460.969159] env[63371]: INFO nova.compute.manager [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1460.969541] env[63371]: DEBUG oslo.service.loopingcall [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1460.970187] env[63371]: DEBUG nova.compute.manager [-] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1460.970408] env[63371]: DEBUG nova.network.neutron [-] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1461.006023] env[63371]: DEBUG oslo_vmware.api [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Task: {'id': task-1773915, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.378601} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.006023] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1461.006023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1461.006023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1461.006023] env[63371]: INFO nova.compute.manager [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1461.006340] env[63371]: DEBUG oslo.service.loopingcall [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1461.006340] env[63371]: DEBUG nova.compute.manager [-] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1461.006340] env[63371]: DEBUG nova.network.neutron [-] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1461.250579] env[63371]: DEBUG oslo_vmware.api [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1773916, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.326306} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.251065] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1461.251586] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1461.251920] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1461.252221] env[63371]: INFO nova.compute.manager [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Took 1.35 seconds to destroy the instance on the hypervisor. [ 1461.252596] env[63371]: DEBUG oslo.service.loopingcall [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1461.253100] env[63371]: DEBUG nova.compute.manager [-] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1461.255026] env[63371]: DEBUG nova.network.neutron [-] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1461.255562] env[63371]: DEBUG nova.network.neutron [-] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.325877] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773917, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103104} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.329952] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1461.329952] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b75c990-e64e-4e1c-9b3c-4d9b652ad5af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.360827] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] be37eb1c-8582-4446-afd6-ae11a8cadf95/be37eb1c-8582-4446-afd6-ae11a8cadf95.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1461.361909] env[63371]: DEBUG nova.network.neutron [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Updated VIF entry in instance network info cache for port 59bb4dc3-13e6-4180-bec1-3a41954f8d62. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1461.362261] env[63371]: DEBUG nova.network.neutron [req-6bd461da-e560-49e6-8f15-e08b53a8c480 req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Updating instance_info_cache with network_info: [{"id": "59bb4dc3-13e6-4180-bec1-3a41954f8d62", "address": "fa:16:3e:68:06:08", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59bb4dc3-13", "ovs_interfaceid": "59bb4dc3-13e6-4180-bec1-3a41954f8d62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1461.367686] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9523dd9c-d9c2-4268-ad29-c39940035f87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.385079] env[63371]: DEBUG oslo_concurrency.lockutils [req-6bd461da-e560-49e6-8f15-e08b53a8c480 
req-f3d11f09-7f5c-4599-bdb5-3107de0c8701 service nova] Releasing lock "refresh_cache-64fc862c-a755-4cac-997b-7a8328638269" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.388151] env[63371]: DEBUG oslo_concurrency.lockutils [None req-936d43df-fd85-4fff-ba97-9139476e97fd tempest-ServersTestFqdnHostnames-1313030815 tempest-ServersTestFqdnHostnames-1313030815-project-member] Lock "ca53accc-a15f-4503-87e5-7cbf3e2c0b43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.097s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1461.395612] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773910, 'name': CreateVM_Task, 'duration_secs': 0.62755} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.399579] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1461.399579] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1461.399579] env[63371]: value = "task-1773918" [ 1461.399579] env[63371]: _type = "Task" [ 1461.399579] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.399579] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.399579] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.399871] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1461.400143] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91659464-12a2-4817-a91e-9c90fa31f81f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.411154] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1461.411154] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521a7b04-a00e-fd60-2a51-9aed4de3530e" [ 1461.411154] env[63371]: _type = "Task" [ 1461.411154] env[63371]: 
} to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.416274] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773918, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.429280] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521a7b04-a00e-fd60-2a51-9aed4de3530e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.758861] env[63371]: INFO nova.compute.manager [-] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Took 1.86 seconds to deallocate network for instance. [ 1461.915331] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773918, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1461.927199] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521a7b04-a00e-fd60-2a51-9aed4de3530e, 'name': SearchDatastore_Task, 'duration_secs': 0.018157} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1461.927518] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1461.927718] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1461.928028] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1461.928211] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1461.928409] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1461.931024] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7aa62ffc-18fc-421b-9b5d-a33b70b67bdc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.940579] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1461.940817] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1461.942839] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55788b4a-4435-4462-beb7-a705a3368ce2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1461.948930] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1461.948930] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52621bc7-eae8-f6de-ea29-f253a2a1cdb2" [ 1461.948930] env[63371]: _type = "Task" [ 1461.948930] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1461.961762] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52621bc7-eae8-f6de-ea29-f253a2a1cdb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.089464] env[63371]: DEBUG nova.network.neutron [-] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.118783] env[63371]: DEBUG nova.network.neutron [-] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.133361] env[63371]: DEBUG nova.compute.manager [req-3129fc09-b1d0-43fd-9974-d7ec90b6ff7c req-b8329c98-cc8e-48bd-a5b8-574c18ab745b service nova] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Received event network-vif-deleted-f65a228f-d220-4478-a274-65cee7a3df3c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1462.133361] env[63371]: DEBUG nova.compute.manager [req-3129fc09-b1d0-43fd-9974-d7ec90b6ff7c req-b8329c98-cc8e-48bd-a5b8-574c18ab745b service nova] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Received event network-vif-deleted-e450db3c-69a8-4e46-817e-eb4d6310fb9d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1462.133361] env[63371]: DEBUG nova.compute.manager [req-3129fc09-b1d0-43fd-9974-d7ec90b6ff7c req-b8329c98-cc8e-48bd-a5b8-574c18ab745b service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Received event network-vif-deleted-d1b325d0-b864-44be-8fe4-b923489752d0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1462.133537] env[63371]: INFO nova.compute.manager [req-3129fc09-b1d0-43fd-9974-d7ec90b6ff7c req-b8329c98-cc8e-48bd-a5b8-574c18ab745b service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Neutron deleted interface d1b325d0-b864-44be-8fe4-b923489752d0; detaching it from the instance and deleting it from the info cache [ 1462.133537] env[63371]: DEBUG nova.network.neutron [req-3129fc09-b1d0-43fd-9974-d7ec90b6ff7c req-b8329c98-cc8e-48bd-a5b8-574c18ab745b service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1462.145213] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "interface-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.145604] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1462.148342] env[63371]: DEBUG nova.objects.instance [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'flavor' on Instance uuid 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1462.269574] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1462.414640] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773918, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.433386] env[63371]: DEBUG nova.network.neutron [-] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1462.448033] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717f477d-60ba-427c-aac7-68ce83f96faa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.469751] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c871d79-95d9-4a29-b021-0ae49cbed0db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.483911] env[63371]: DEBUG nova.compute.manager [req-2573811d-85ba-4b61-a02b-53dffc37b08c req-d85f0f1d-b91f-45bd-bc11-2d7326d6158d service nova] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Received event network-vif-deleted-e4eb0664-61b0-40ee-a907-faa96a4e1c4d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1462.484545] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52621bc7-eae8-f6de-ea29-f253a2a1cdb2, 'name': SearchDatastore_Task, 'duration_secs': 0.032602} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1462.486993] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67db17e0-b3d9-43d5-86c8-1104fd414982 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.523273] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f0c023-52bc-4a98-a4ac-196a23c33b0a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.529594] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1462.529594] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f1b9d1-c4c2-6629-8dbc-9d9a8bd22a49" [ 1462.529594] env[63371]: _type = "Task" [ 1462.529594] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1462.541951] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0d9a34-ebcc-45ad-9977-5bc820ff242a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.549715] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f1b9d1-c4c2-6629-8dbc-9d9a8bd22a49, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.564750] env[63371]: DEBUG nova.compute.provider_tree [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1462.592649] env[63371]: INFO nova.compute.manager [-] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Took 1.62 seconds to deallocate network for instance. [ 1462.629626] env[63371]: INFO nova.compute.manager [-] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Took 1.62 seconds to deallocate network for instance. [ 1462.641187] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ddc4cc49-b32e-47d1-9e37-c5b7de59e22e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.656599] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e4db20-c8e1-4b99-a2d1-796a8291912c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1462.698195] env[63371]: DEBUG nova.compute.manager [req-3129fc09-b1d0-43fd-9974-d7ec90b6ff7c req-b8329c98-cc8e-48bd-a5b8-574c18ab745b service nova] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Detach interface failed, port_id=d1b325d0-b864-44be-8fe4-b923489752d0, reason: Instance 201a2d1e-9e2c-4c07-92be-200408874ad4 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1462.779373] env[63371]: DEBUG nova.objects.instance [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'pci_requests' on Instance uuid 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1462.912504] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773918, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1462.936028] env[63371]: INFO nova.compute.manager [-] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Took 1.68 seconds to deallocate network for instance. [ 1463.046531] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f1b9d1-c4c2-6629-8dbc-9d9a8bd22a49, 'name': SearchDatastore_Task, 'duration_secs': 0.028692} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.046822] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1463.047488] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 64fc862c-a755-4cac-997b-7a8328638269/64fc862c-a755-4cac-997b-7a8328638269.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1463.047488] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f73348a8-497b-451b-8ac5-8d8ec69e71ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.051972] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.052207] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.057627] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1463.057627] env[63371]: value = "task-1773919" [ 1463.057627] env[63371]: _type = "Task" [ 1463.057627] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.068025] env[63371]: DEBUG nova.scheduler.client.report [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1463.071204] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.103038] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.136567] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.283885] env[63371]: DEBUG nova.objects.base [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Object Instance<7e463dd7-84a6-4e6d-ae8f-0860e3a20f05> lazy-loaded attributes: flavor,pci_requests {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1463.283885] env[63371]: DEBUG nova.network.neutron [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1463.333712] env[63371]: DEBUG nova.policy [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aacd81490704110b6cc6aba338883a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a5897667b6b47deb7ff5b64f9499f36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1463.412722] 
env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773918, 'name': ReconfigVM_Task, 'duration_secs': 1.634441} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1463.412896] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Reconfigured VM instance instance-00000028 to attach disk [datastore1] be37eb1c-8582-4446-afd6-ae11a8cadf95/be37eb1c-8582-4446-afd6-ae11a8cadf95.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1463.413572] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e5dcf02-8287-4635-bfdd-e85aabbf9b80 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.421325] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1463.421325] env[63371]: value = "task-1773920" [ 1463.421325] env[63371]: _type = "Task" [ 1463.421325] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1463.430459] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.442835] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1463.557852] env[63371]: DEBUG nova.compute.utils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1463.568927] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1463.573163] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.740s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1463.573693] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1463.576485] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.042s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1463.576718] env[63371]: DEBUG nova.objects.instance [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lazy-loading 'resources' on Instance uuid 9249f27a-1985-4be1-947c-e433c7aa26f1 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1463.634766] env[63371]: DEBUG nova.network.neutron [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Successfully created port: 8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1463.935969] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.058972] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1464.069776] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.080515] env[63371]: DEBUG nova.compute.utils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1464.084323] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1464.084497] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1464.125661] env[63371]: DEBUG nova.policy [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '58195dc4ac74493cbe7ed4fbe63bce54', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '28cc236260a947899c5e09bca25f7360', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1464.399724] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Successfully created port: 9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1464.435296] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.573939] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1464.587226] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1464.656587] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99347f98-cc95-4459-811b-8c696bbdc2ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.667138] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c23bb3a-999a-4a36-ac47-48d2dddaff98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.705513] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-949d5b23-ffc5-4f22-9693-104f84ad8825 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.718764] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9aaa7c2-e81c-4171-a570-3534f6e840f5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1464.737588] env[63371]: DEBUG nova.compute.provider_tree [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1464.939310] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.071854] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.211961] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1465.212243] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.212478] env[63371]: INFO nova.compute.manager [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Attaching volume 5d39df22-c7dc-4c2a-8bed-1f0a74a568c4 to /dev/sdb [ 1465.241642] env[63371]: DEBUG nova.scheduler.client.report [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1465.252521] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b53aadf3-bbb0-4b02-b0c5-5cd22da9d0c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.260595] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc671b3-32a6-4a93-86ca-561ee4b580b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.276670] env[63371]: DEBUG nova.virt.block_device [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updating existing volume attachment record: 746f8f05-8459-49a4-a68d-b54f26e685f9 {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1465.325328] env[63371]: DEBUG nova.network.neutron [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Successfully updated port: 8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1465.436199] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e 
tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.574838] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1465.599240] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1465.625882] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1465.625882] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1465.625882] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1465.626705] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1465.626705] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1465.626705] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1465.627137] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1465.627361] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1465.627678] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1465.627894] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1465.628284] env[63371]: DEBUG nova.virt.hardware [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1465.630301] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c01a640-6354-4487-a049-b39a9cfa86a2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.640706] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5aa865-3635-4700-8cad-de9511839bac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1465.747096] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.170s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1465.753394] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.436s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1465.753394] env[63371]: INFO nova.compute.claims [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Claim successful 
on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1465.778650] env[63371]: INFO nova.scheduler.client.report [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Deleted allocations for instance 9249f27a-1985-4be1-947c-e433c7aa26f1 [ 1465.828703] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1465.828911] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1465.829069] env[63371]: DEBUG nova.network.neutron [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1465.939522] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.073777] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773919, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.92498} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.074107] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 64fc862c-a755-4cac-997b-7a8328638269/64fc862c-a755-4cac-997b-7a8328638269.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1466.074373] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1466.074776] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2dfd9421-982c-4062-aba2-d31e63353586 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.084542] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1466.084542] env[63371]: value = "task-1773924" [ 1466.084542] env[63371]: _type = "Task" [ 1466.084542] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.097654] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773924, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.109050] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "0e2c8ced-198f-43be-9d41-703a7c590df4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.109503] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.289015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-454671e8-755e-49a5-8820-27fdc7566257 tempest-ServerGroupTestJSON-1005911884 tempest-ServerGroupTestJSON-1005911884-project-member] Lock "9249f27a-1985-4be1-947c-e433c7aa26f1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.681s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.369661] env[63371]: WARNING nova.network.neutron [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] 78c77028-c23a-4160-8b08-d336e8101b3b already exists in list: networks containing: ['78c77028-c23a-4160-8b08-d336e8101b3b']. ignoring it [ 1466.441927] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773920, 'name': Rename_Task, 'duration_secs': 2.532401} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.443283] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Successfully updated port: 9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1466.446100] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1466.446100] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01f31744-b120-4bd5-82e9-6ad9c61f4c0c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.455190] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1466.455190] env[63371]: value = "task-1773925" [ 1466.455190] env[63371]: _type = "Task" [ 1466.455190] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.468880] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773925, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.533411] env[63371]: DEBUG nova.compute.manager [req-ff89042e-7241-4506-8ea0-2999f81435c4 req-21afead0-88e7-46e0-802e-5e202967e3c1 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received event network-vif-plugged-8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1466.533411] env[63371]: DEBUG oslo_concurrency.lockutils [req-ff89042e-7241-4506-8ea0-2999f81435c4 req-21afead0-88e7-46e0-802e-5e202967e3c1 service nova] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1466.533508] env[63371]: DEBUG oslo_concurrency.lockutils [req-ff89042e-7241-4506-8ea0-2999f81435c4 req-21afead0-88e7-46e0-802e-5e202967e3c1 service nova] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1466.534362] env[63371]: DEBUG oslo_concurrency.lockutils [req-ff89042e-7241-4506-8ea0-2999f81435c4 req-21afead0-88e7-46e0-802e-5e202967e3c1 service nova] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1466.534710] env[63371]: DEBUG nova.compute.manager [req-ff89042e-7241-4506-8ea0-2999f81435c4 req-21afead0-88e7-46e0-802e-5e202967e3c1 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] No waiting events found dispatching network-vif-plugged-8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1466.534757] env[63371]: WARNING nova.compute.manager [req-ff89042e-7241-4506-8ea0-2999f81435c4 req-21afead0-88e7-46e0-802e-5e202967e3c1 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received unexpected event network-vif-plugged-8e143eba-fc86-4474-91f7-a5785bb2dbe3 for instance with vm_state active and task_state None. [ 1466.596476] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773924, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085998} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1466.596676] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1466.597795] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8fba7b-1b15-4e5d-b05b-f3d5cbb709fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.622278] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 64fc862c-a755-4cac-997b-7a8328638269/64fc862c-a755-4cac-997b-7a8328638269.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1466.622717] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-271c206f-ad4d-485c-87ca-53453d9884fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.650267] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1466.650267] env[63371]: value = "task-1773926" [ 1466.650267] env[63371]: _type = "Task" [ 1466.650267] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.662239] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773926, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1466.903623] env[63371]: DEBUG nova.network.neutron [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8e143eba-fc86-4474-91f7-a5785bb2dbe3", "address": "fa:16:3e:4e:95:62", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e143eba-fc", "ovs_interfaceid": "8e143eba-fc86-4474-91f7-a5785bb2dbe3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1466.946409] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "refresh_cache-dcf8063b-56eb-439c-bee5-139a1e157714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1466.946409] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock 
"refresh_cache-dcf8063b-56eb-439c-bee5-139a1e157714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1466.946771] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1466.972835] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773925, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.018924] env[63371]: DEBUG nova.compute.manager [req-a4d51ff1-99f2-410c-a130-73927fdfefa9 req-5b3e909e-15ea-44c0-bc43-73b0fab2c0d0 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Received event network-vif-plugged-9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1467.021784] env[63371]: DEBUG oslo_concurrency.lockutils [req-a4d51ff1-99f2-410c-a130-73927fdfefa9 req-5b3e909e-15ea-44c0-bc43-73b0fab2c0d0 service nova] Acquiring lock "dcf8063b-56eb-439c-bee5-139a1e157714-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1467.021784] env[63371]: DEBUG oslo_concurrency.lockutils [req-a4d51ff1-99f2-410c-a130-73927fdfefa9 req-5b3e909e-15ea-44c0-bc43-73b0fab2c0d0 service nova] Lock "dcf8063b-56eb-439c-bee5-139a1e157714-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1467.021784] env[63371]: DEBUG oslo_concurrency.lockutils [req-a4d51ff1-99f2-410c-a130-73927fdfefa9 req-5b3e909e-15ea-44c0-bc43-73b0fab2c0d0 service nova] Lock "dcf8063b-56eb-439c-bee5-139a1e157714-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1467.021784] env[63371]: DEBUG nova.compute.manager [req-a4d51ff1-99f2-410c-a130-73927fdfefa9 req-5b3e909e-15ea-44c0-bc43-73b0fab2c0d0 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] No waiting events found dispatching network-vif-plugged-9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1467.021784] env[63371]: WARNING nova.compute.manager [req-a4d51ff1-99f2-410c-a130-73927fdfefa9 req-5b3e909e-15ea-44c0-bc43-73b0fab2c0d0 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Received unexpected event network-vif-plugged-9661bc17-8fdd-42bf-ae5d-bfa211e88e4a for instance with vm_state building and task_state spawning. [ 1467.165170] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773926, 'name': ReconfigVM_Task, 'duration_secs': 0.304594} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.168535] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 64fc862c-a755-4cac-997b-7a8328638269/64fc862c-a755-4cac-997b-7a8328638269.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1467.168768] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4708f736-6e42-48f8-92f1-f278fa5e59e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.178203] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1467.178203] env[63371]: value = "task-1773927" [ 1467.178203] env[63371]: _type = "Task" [ 1467.178203] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.190942] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773927, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.329573] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-169b54f3-5f1e-4e80-87a5-7452ef56a7e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.338936] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41057678-eb45-408e-ab33-25b7cf062d45 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.371053] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97809567-e4a0-4275-9555-c470706f0e8e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.379991] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c709e9bd-b39a-455c-b0ac-f2b930a6b269 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.394476] env[63371]: DEBUG nova.compute.provider_tree [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1467.407279] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1467.407937] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1467.408113] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.409226] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5e17611-674b-42c4-852e-15eb41ccb579 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.427588] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1467.427588] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1467.427588] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1467.427839] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1467.427839] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1467.427966] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1467.429094] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1467.429094] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1467.429094] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1467.429094] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1467.429094] env[63371]: DEBUG nova.virt.hardware [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1467.436745] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Reconfiguring VM to attach interface {{(pid=63371) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1467.436745] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73735527-1f9a-4f70-b4f4-de5663feea03 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.454721] env[63371]: DEBUG oslo_vmware.api [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1467.454721] env[63371]: value = "task-1773928" [ 1467.454721] env[63371]: _type = "Task" [ 1467.454721] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.468882] env[63371]: DEBUG oslo_vmware.api [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773925, 'name': PowerOnVM_Task, 'duration_secs': 0.636102} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.471929] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1467.472158] env[63371]: INFO nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Took 13.47 seconds to spawn the instance on the hypervisor. [ 1467.472344] env[63371]: DEBUG nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1467.472678] env[63371]: DEBUG oslo_vmware.api [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773928, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.473389] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92b1a94e-5ee9-44f1-a28e-5b74e613c3f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.484208] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1467.691295] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773927, 'name': Rename_Task, 'duration_secs': 0.165582} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.691295] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1467.691295] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5658bbd7-91ec-434e-899c-e3ea2aeba12f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.700295] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1467.700295] env[63371]: value = "task-1773929" [ 1467.700295] env[63371]: _type = "Task" [ 1467.700295] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.700295] env[63371]: DEBUG nova.network.neutron [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Updating instance_info_cache with network_info: [{"id": "9661bc17-8fdd-42bf-ae5d-bfa211e88e4a", "address": "fa:16:3e:12:54:75", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9661bc17-8f", "ovs_interfaceid": "9661bc17-8fdd-42bf-ae5d-bfa211e88e4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1467.711435] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773929, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.898290] env[63371]: DEBUG nova.scheduler.client.report [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1467.967629] env[63371]: DEBUG oslo_vmware.api [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773928, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.994595] env[63371]: INFO nova.compute.manager [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Took 43.32 seconds to build instance. 
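Note on the *_Task records above: ReconfigVM_Task, Rename_Task, ExtendVirtualDisk_Task and PowerOnVM_Task are all driven through the same oslo.vmware invoke-and-poll cycle that this log references (wait_for_task at oslo_vmware/api.py:397, _poll_task at api.py:434/444). The sketch below is illustrative only and is not taken from this deployment; the vCenter endpoint, credentials and the VM managed-object reference are placeholders.

```python
# Illustrative sketch of the invoke/wait cycle behind the *_Task records
# above, using the oslo.vmware session module this log already references
# (oslo_vmware/api.py). Endpoint, credentials and vm_ref are placeholders.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test', 'user', 'secret',       # hypothetical vCenter + creds
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = ...  # a VirtualMachine moref looked up elsewhere

# invoke_api() issues the SOAP call ("Invoking VirtualMachine.PowerOnVM_Task"),
# wait_for_task() polls it, producing the "progress is N%" and
# "completed successfully" records seen in this log.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)  # 'success' once the task finishes
```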
[ 1468.203328] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "refresh_cache-dcf8063b-56eb-439c-bee5-139a1e157714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.203328] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Instance network_info: |[{"id": "9661bc17-8fdd-42bf-ae5d-bfa211e88e4a", "address": "fa:16:3e:12:54:75", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9661bc17-8f", "ovs_interfaceid": "9661bc17-8fdd-42bf-ae5d-bfa211e88e4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1468.204405] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:54:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '842f738f-eaa4-4444-a9bf-90d2b533184c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9661bc17-8fdd-42bf-ae5d-bfa211e88e4a', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1468.212097] env[63371]: DEBUG oslo.service.loopingcall [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1468.216806] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1468.217190] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17b815da-a34b-471a-9d2b-1fda7eef1e3b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.240104] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773929, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.241603] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1468.241603] env[63371]: value = "task-1773931" [ 1468.241603] env[63371]: _type = "Task" [ 1468.241603] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.251348] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773931, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.403865] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.404428] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1468.407930] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 24.793s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.408130] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.408313] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1468.408672] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.725s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.417162] env[63371]: INFO nova.compute.claims [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1468.418019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0c4c17-2ac4-49c7-9f14-e90ad7b16bfb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.431623] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747b7c7b-1116-49fc-bc31-0b138e00f717 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.450655] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4893a758-1745-4e19-af40-6c96002d3789 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.471013] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b636c8-4035-441e-8e25-b60fda24b5a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.482911] env[63371]: DEBUG oslo_vmware.api [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773928, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.519583] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c98a882f-4381-43c7-be7f-e99cf37e291e tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.719s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1468.520892] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178801MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1468.520892] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.718020] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773929, 'name': PowerOnVM_Task, 'duration_secs': 0.638703} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.718020] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1468.718020] env[63371]: INFO nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Took 11.84 seconds to spawn the instance on the hypervisor. [ 1468.718020] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1468.718020] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0adf394b-8574-4095-9b17-38b987f531f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.752318] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773931, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.781530] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "574121c4-c721-4d30-81ec-3f2310a7b6d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1468.782167] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1468.919389] env[63371]: DEBUG nova.compute.utils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1468.920884] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1468.921071] env[63371]: DEBUG nova.network.neutron [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1468.968183] env[63371]: DEBUG oslo_vmware.api [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773928, 'name': ReconfigVM_Task, 'duration_secs': 1.153003} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.969649] env[63371]: DEBUG nova.policy [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a57fe97ed9414622a09d1d59a9eff8bc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '00a8f1cf54a6426b9980b16b17283b19', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1468.971542] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1468.971611] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Reconfigured VM to attach interface {{(pid=63371) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1469.023210] env[63371]: DEBUG nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1469.237548] env[63371]: INFO nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Took 43.38 seconds to build instance. [ 1469.253861] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773931, 'name': CreateVM_Task, 'duration_secs': 0.529809} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.253861] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1469.254733] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.254894] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.255248] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1469.256102] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcc149d7-f7b3-4c4b-86f0-ac141ab26f98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.262269] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1469.262269] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c2bbcc-0fcf-1025-0a0d-4eb91557b449" [ 1469.262269] env[63371]: _type = "Task" [ 1469.262269] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.271296] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c2bbcc-0fcf-1025-0a0d-4eb91557b449, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.352213] env[63371]: DEBUG nova.network.neutron [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Successfully created port: 5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1469.386115] env[63371]: DEBUG nova.compute.manager [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received event network-changed-8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1469.386322] env[63371]: DEBUG nova.compute.manager [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Refreshing instance network info cache due to event network-changed-8e143eba-fc86-4474-91f7-a5785bb2dbe3. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1469.386543] env[63371]: DEBUG oslo_concurrency.lockutils [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] Acquiring lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.386754] env[63371]: DEBUG oslo_concurrency.lockutils [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] Acquired lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.386941] env[63371]: DEBUG nova.network.neutron [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Refreshing network info cache for port 8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1469.425357] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1469.479190] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c2359343-f234-46e0-9a62-1c5e0cc8687c tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.334s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.548720] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1469.641504] env[63371]: DEBUG nova.compute.manager [None req-dc7c3fd3-2556-44a6-bb03-17cff56a52ac tempest-ServerDiagnosticsV248Test-122985939 tempest-ServerDiagnosticsV248Test-122985939-project-admin] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1469.643273] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad434f9-026b-4a14-addc-1594661a5eab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.655881] env[63371]: INFO nova.compute.manager [None req-dc7c3fd3-2556-44a6-bb03-17cff56a52ac tempest-ServerDiagnosticsV248Test-122985939 tempest-ServerDiagnosticsV248Test-122985939-project-admin] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Retrieving diagnostics [ 1469.656763] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794f1bf3-3d4e-4086-a15c-a66d2083b832 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.740641] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "64fc862c-a755-4cac-997b-7a8328638269" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 99.734s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1469.774014] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c2bbcc-0fcf-1025-0a0d-4eb91557b449, 'name': SearchDatastore_Task, 'duration_secs': 0.018466} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.776594] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1469.776862] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1469.777109] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1469.777257] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1469.777456] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1469.777926] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cb3fc4d-6fa1-4965-a5f6-660d9e7826c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.787861] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1469.788082] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1469.791287] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16672559-2c77-4a4f-b4c9-855c0f0916ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.797213] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1469.797213] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52db39ca-bb87-83a7-0272-b58d6fc00cf9" [ 1469.797213] env[63371]: _type = "Task" [ 1469.797213] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.804940] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52db39ca-bb87-83a7-0272-b58d6fc00cf9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.842669] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Volume attach. Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1469.842974] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368317', 'volume_id': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'name': 'volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '44cc8606-24f5-4f6b-b96f-3559c9c3f06e', 'attached_at': '', 'detached_at': '', 'volume_id': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'serial': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1469.843849] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1735033-4318-4dba-b6f3-39e546b200c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.865441] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-235f5362-c0c3-4dfe-be05-1c3633e99c69 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.890238] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4/volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4.vmdk or 
device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1469.894834] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-274f529b-f7c7-401b-aa0a-6ed56d19d26b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.913885] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1469.913885] env[63371]: value = "task-1773932" [ 1469.913885] env[63371]: _type = "Task" [ 1469.913885] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.925234] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773932, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.010728] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa768438-c7e2-42c0-bdf3-1087a521e897 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.019194] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e99babf-e178-4601-9735-297950ecb520 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.056858] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0df5692c-8662-40c7-9fea-5d19a5c37b45 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.067411] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1bb6ab2-907c-4c9a-87b3-abcccc913d78 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.087558] env[63371]: DEBUG nova.compute.provider_tree [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1470.166254] env[63371]: DEBUG nova.compute.manager [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Received event network-changed-9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1470.166431] env[63371]: DEBUG nova.compute.manager [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Refreshing instance network info cache due to event network-changed-9661bc17-8fdd-42bf-ae5d-bfa211e88e4a. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1470.167396] env[63371]: DEBUG oslo_concurrency.lockutils [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] Acquiring lock "refresh_cache-dcf8063b-56eb-439c-bee5-139a1e157714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1470.167572] env[63371]: DEBUG oslo_concurrency.lockutils [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] Acquired lock "refresh_cache-dcf8063b-56eb-439c-bee5-139a1e157714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1470.167747] env[63371]: DEBUG nova.network.neutron [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Refreshing network info cache for port 9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1470.170270] env[63371]: DEBUG nova.network.neutron [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updated VIF entry in instance network info cache for port 8e143eba-fc86-4474-91f7-a5785bb2dbe3. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1470.170718] env[63371]: DEBUG nova.network.neutron [req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "8e143eba-fc86-4474-91f7-a5785bb2dbe3", "address": "fa:16:3e:4e:95:62", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e143eba-fc", "ovs_interfaceid": "8e143eba-fc86-4474-91f7-a5785bb2dbe3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1470.243808] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1470.305539] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b8c048-9182-1a2e-6ee5-cf1c575dcf3e/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1470.306697] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562df63d-62c5-4f46-ac16-ec30029d0ead {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.314076] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b8c048-9182-1a2e-6ee5-cf1c575dcf3e/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1470.314263] env[63371]: ERROR oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b8c048-9182-1a2e-6ee5-cf1c575dcf3e/disk-0.vmdk due to incomplete transfer. [ 1470.318486] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-73625c23-9296-4b00-aa40-7a41d96e078b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.319762] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52db39ca-bb87-83a7-0272-b58d6fc00cf9, 'name': SearchDatastore_Task, 'duration_secs': 0.037757} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.321047] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d4af369-ffb6-43c3-ba42-eebb9cc7ae29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.326675] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1470.326675] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523cdfaa-b2a7-2213-305d-8e79378ce76b" [ 1470.326675] env[63371]: _type = "Task" [ 1470.326675] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.327979] env[63371]: DEBUG oslo_vmware.rw_handles [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b8c048-9182-1a2e-6ee5-cf1c575dcf3e/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1470.328331] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Uploaded image d6a027d0-1605-4385-9e91-38b4326d06e7 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1470.331009] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1470.334992] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-199de9e0-8268-4f1e-b896-f225d8f6b0f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.341848] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523cdfaa-b2a7-2213-305d-8e79378ce76b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.343342] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1470.343342] env[63371]: value = "task-1773933" [ 1470.343342] env[63371]: _type = "Task" [ 1470.343342] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.352490] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773933, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.423801] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773932, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.438267] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1470.460721] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1470.461007] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1470.461179] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1470.461364] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1470.461590] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1470.461743] 
env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1470.461950] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1470.462130] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1470.462302] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1470.462467] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1470.462657] env[63371]: DEBUG nova.virt.hardware [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1470.463583] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a36f0be-c3c1-42e6-9aee-30a97e077439 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.471997] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70327044-7a68-4602-b8d3-36e040802dd1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.591168] env[63371]: DEBUG nova.scheduler.client.report [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1470.673019] env[63371]: DEBUG oslo_concurrency.lockutils 
[req-3616c5d2-5cb6-40a4-90e3-8dd59b39d56a req-631a693c-d451-48ce-8fa2-e8d29cf2c02b service nova] Releasing lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.777209] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.841222] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523cdfaa-b2a7-2213-305d-8e79378ce76b, 'name': SearchDatastore_Task, 'duration_secs': 0.023555} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.841222] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1470.841222] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] dcf8063b-56eb-439c-bee5-139a1e157714/dcf8063b-56eb-439c-bee5-139a1e157714.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1470.841222] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9df852ad-b74d-4ac1-88a5-9654efcfddbb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.848479] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1470.848479] env[63371]: value = "task-1773934" [ 1470.848479] env[63371]: _type = "Task" [ 1470.848479] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.854526] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773933, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.859394] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773934, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.926065] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773932, 'name': ReconfigVM_Task, 'duration_secs': 0.630982} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.929310] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Reconfigured VM instance instance-0000001f to attach disk [datastore1] volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4/volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1470.935016] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d1890a8b-2963-4ea2-bbc6-68bcf131c8a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.953502] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1470.953502] env[63371]: value = "task-1773935" [ 1470.953502] env[63371]: _type = "Task" [ 1470.953502] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.962527] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773935, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.993878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1470.994261] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1470.994439] env[63371]: INFO nova.compute.manager [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Rebooting instance [ 1471.009748] env[63371]: DEBUG nova.network.neutron [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Updated VIF entry in instance network info cache for port 9661bc17-8fdd-42bf-ae5d-bfa211e88e4a. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1471.010158] env[63371]: DEBUG nova.network.neutron [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Updating instance_info_cache with network_info: [{"id": "9661bc17-8fdd-42bf-ae5d-bfa211e88e4a", "address": "fa:16:3e:12:54:75", "network": {"id": "673f1a5c-f9b6-4b37-9034-ad6fd707d900", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-953059622-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "28cc236260a947899c5e09bca25f7360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "842f738f-eaa4-4444-a9bf-90d2b533184c", "external-id": "nsx-vlan-transportzone-460", "segmentation_id": 460, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9661bc17-8f", "ovs_interfaceid": "9661bc17-8fdd-42bf-ae5d-bfa211e88e4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1471.026670] env[63371]: DEBUG nova.network.neutron [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Successfully updated port: 5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 1471.099153] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.690s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.099504] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1471.102556] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.250s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.104143] env[63371]: INFO nova.compute.claims [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1471.357263] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773933, 'name': Destroy_Task, 'duration_secs': 0.896785} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.360771] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Destroyed the VM [ 1471.361188] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1471.361506] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773934, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.362064] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f50fea1d-e46f-40dd-9c55-1df567096014 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.370684] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1471.370684] env[63371]: value = "task-1773936" [ 1471.370684] env[63371]: _type = "Task" [ 1471.370684] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.381519] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773936, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.391355] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "50d5eac1-0752-4089-948c-b04439df6f6c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.391502] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "50d5eac1-0752-4089-948c-b04439df6f6c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.391705] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "50d5eac1-0752-4089-948c-b04439df6f6c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.392369] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "50d5eac1-0752-4089-948c-b04439df6f6c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.392369] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "50d5eac1-0752-4089-948c-b04439df6f6c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.394172] env[63371]: INFO nova.compute.manager [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Terminating instance [ 1471.395986] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "refresh_cache-50d5eac1-0752-4089-948c-b04439df6f6c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.396266] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquired lock "refresh_cache-50d5eac1-0752-4089-948c-b04439df6f6c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.396461] env[63371]: DEBUG nova.network.neutron [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1471.464461] env[63371]: DEBUG oslo_vmware.api [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1773935, 'name': ReconfigVM_Task, 'duration_secs': 0.162876} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.464763] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368317', 'volume_id': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'name': 'volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '44cc8606-24f5-4f6b-b96f-3559c9c3f06e', 'attached_at': '', 'detached_at': '', 'volume_id': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'serial': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1471.512702] env[63371]: DEBUG oslo_concurrency.lockutils [req-d178acf8-0215-475d-aec9-9578a038fe41 req-20b99de0-7b4b-4a30-b83b-41b00fad6985 service nova] Releasing lock "refresh_cache-dcf8063b-56eb-439c-bee5-139a1e157714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1471.518802] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.520688] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquired lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.520688] env[63371]: DEBUG nova.network.neutron [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1471.530232] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "refresh_cache-aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.530232] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquired lock "refresh_cache-aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1471.530232] env[63371]: DEBUG nova.network.neutron [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Building network info cache for instance {{(pid=63371) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1471.535377] env[63371]: DEBUG nova.compute.manager [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Received event network-vif-plugged-5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1471.535377] env[63371]: DEBUG oslo_concurrency.lockutils [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] Acquiring lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1471.535377] env[63371]: DEBUG oslo_concurrency.lockutils [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1471.535377] env[63371]: DEBUG oslo_concurrency.lockutils [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1471.535377] env[63371]: DEBUG nova.compute.manager [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] No waiting events found dispatching network-vif-plugged-5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1471.535377] env[63371]: WARNING nova.compute.manager [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Received unexpected event network-vif-plugged-5f6d168b-1bd3-4bdd-9693-ee62c25e8666 for instance with vm_state building and task_state spawning. [ 1471.535377] env[63371]: DEBUG nova.compute.manager [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Received event network-changed-5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1471.537281] env[63371]: DEBUG nova.compute.manager [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Refreshing instance network info cache due to event network-changed-5f6d168b-1bd3-4bdd-9693-ee62c25e8666. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1471.537755] env[63371]: DEBUG oslo_concurrency.lockutils [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] Acquiring lock "refresh_cache-aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1471.610732] env[63371]: DEBUG nova.compute.utils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1471.615523] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1471.615523] env[63371]: DEBUG nova.network.neutron [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1471.714649] env[63371]: DEBUG nova.policy [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '01c1437e43364f0ba8db6677fe2ed978', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3fa37041acf4211987c97c105c47cf0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1471.859660] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773934, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.699297} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.862608] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] dcf8063b-56eb-439c-bee5-139a1e157714/dcf8063b-56eb-439c-bee5-139a1e157714.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1471.862608] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1471.862608] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b8275f6f-a245-41a7-a201-b2386cf6431e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.869039] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1471.869039] env[63371]: value = "task-1773937" [ 1471.869039] env[63371]: _type = "Task" [ 1471.869039] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.878949] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773936, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.882362] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773937, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.918756] env[63371]: DEBUG nova.network.neutron [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1471.981029] env[63371]: DEBUG nova.network.neutron [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.071911] env[63371]: DEBUG nova.network.neutron [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1472.115688] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1472.126454] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "interface-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-8e143eba-fc86-4474-91f7-a5785bb2dbe3" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1472.126703] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-8e143eba-fc86-4474-91f7-a5785bb2dbe3" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1472.258372] env[63371]: DEBUG nova.network.neutron [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Successfully created port: 96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1472.382013] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773937, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070028} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.382781] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1472.383568] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6ed915-937c-47bd-9e34-aa0e3d5b0321 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.390531] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773936, 'name': RemoveSnapshot_Task} progress is 74%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.413144] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] dcf8063b-56eb-439c-bee5-139a1e157714/dcf8063b-56eb-439c-bee5-139a1e157714.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1472.416618] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0cafb811-876e-44e4-93b8-e92c54438321 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.439485] env[63371]: DEBUG nova.network.neutron [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Updating instance_info_cache with network_info: [{"id": "5f6d168b-1bd3-4bdd-9693-ee62c25e8666", "address": "fa:16:3e:79:87:94", "network": {"id": "c6b502a7-b302-4a03-a23e-3a32b446d367", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-615984944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a8f1cf54a6426b9980b16b17283b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f6d168b-1b", "ovs_interfaceid": "5f6d168b-1bd3-4bdd-9693-ee62c25e8666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.450396] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1472.450396] env[63371]: value = "task-1773938" [ 1472.450396] env[63371]: _type = "Task" [ 1472.450396] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.462800] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773938, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.484663] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Releasing lock "refresh_cache-50d5eac1-0752-4089-948c-b04439df6f6c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1472.485079] env[63371]: DEBUG nova.compute.manager [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1472.485393] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1472.486134] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82dc79f6-a6a6-48c7-9ffd-e0b908f23110 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.494056] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1472.496842] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b44da852-b05d-4196-b45b-2b425a14bcd7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.503229] env[63371]: DEBUG oslo_vmware.api [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1472.503229] env[63371]: value = "task-1773939" [ 1472.503229] env[63371]: _type = "Task" [ 1472.503229] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.515318] env[63371]: DEBUG oslo_vmware.api [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773939, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.516179] env[63371]: DEBUG nova.network.neutron [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Updating instance_info_cache with network_info: [{"id": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "address": "fa:16:3e:b4:74:9c", "network": {"id": "718e3616-e606-482f-90b3-aaac39f38b39", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-354664269-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1771acadeced40a6889b7dfb974e7886", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "257e5ea7-8b80-4301-9900-a754f1fe2031", "external-id": "nsx-vlan-transportzone-682", "segmentation_id": 682, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap78d7a9b4-25", "ovs_interfaceid": "78d7a9b4-2512-4b55-95e3-50aa146658fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1472.519420] env[63371]: DEBUG nova.objects.instance [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lazy-loading 'flavor' on Instance uuid 44cc8606-24f5-4f6b-b96f-3559c9c3f06e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1472.628950] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1472.630008] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1472.632185] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b62c5d88-63ae-4db3-a487-a8e6c671b484 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.656087] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33567575-079c-47d0-b541-00d8e1e774a3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.690694] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 
7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Reconfiguring VM to detach interface {{(pid=63371) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1472.693921] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1802f9b1-49fd-4aea-8d19-76c6f1527e9f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.713162] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1472.713162] env[63371]: value = "task-1773940" [ 1472.713162] env[63371]: _type = "Task" [ 1472.713162] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1472.725056] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1472.883993] env[63371]: DEBUG oslo_vmware.api [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773936, 'name': RemoveSnapshot_Task, 'duration_secs': 1.438082} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.888564] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1472.888564] env[63371]: INFO nova.compute.manager [None req-dcc1853f-811b-4501-99d6-6ca0ef5ff834 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Took 23.48 seconds to snapshot the instance on the hypervisor. 
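The RemoveSnapshot_Task entries above follow oslo.vmware's generic invoke-and-poll pattern that produces most of the task lines in this log: the driver asks vCenter to start a task, then polls it until it finishes, emitting a "progress is N%" debug line on each poll and a "completed successfully" line at the end. The sketch below is a minimal, assumed illustration of that pattern only; the vCenter address, credentials, and the snapshot reference are placeholders, not values taken from this run.

# Illustrative sketch of the oslo.vmware invoke-and-poll pattern (assumed, not
# extracted from this log). Placeholder values throughout.
from oslo_vmware import api


def delete_vm_snapshot(session, snapshot_ref):
    # Ask vCenter to start the task; invoke_api returns a task reference
    # immediately without waiting for the work to finish.
    task_ref = session.invoke_api(
        session.vim, 'RemoveSnapshot_Task', snapshot_ref,
        removeChildren=False)
    # Block until the task succeeds or raises; each poll corresponds to one
    # "_poll_task ... progress is N%" debug line seen in the log, and
    # completion to the "completed successfully" line.
    session.wait_for_task(task_ref)


# Hypothetical session setup, shown only for the call shape; constructing the
# session logs in to vCenter, so these placeholder values would not connect.
session = api.VMwareAPISession(
    'vcenter.example.test', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)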
[ 1472.917056] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641a0280-6d08-4cab-9440-c560b59ac5fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.924963] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a4902b-40ae-458a-a413-386f5b86bcde {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.958821] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Releasing lock "refresh_cache-aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1472.959176] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Instance network_info: |[{"id": "5f6d168b-1bd3-4bdd-9693-ee62c25e8666", "address": "fa:16:3e:79:87:94", "network": {"id": "c6b502a7-b302-4a03-a23e-3a32b446d367", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-615984944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a8f1cf54a6426b9980b16b17283b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f6d168b-1b", "ovs_interfaceid": "5f6d168b-1bd3-4bdd-9693-ee62c25e8666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1472.959794] env[63371]: DEBUG oslo_concurrency.lockutils [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] Acquired lock "refresh_cache-aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1472.960303] env[63371]: DEBUG nova.network.neutron [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Refreshing network info cache for port 5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1472.961581] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:87:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'4c6a4836-66dc-4e43-982b-f8fcd3f9989a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f6d168b-1bd3-4bdd-9693-ee62c25e8666', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1472.970231] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Creating folder: Project (00a8f1cf54a6426b9980b16b17283b19). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1472.973844] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a70563-25c4-4f90-951c-0ef46584b7cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.979582] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b446c28-bce4-4fe9-8f01-15ab7d34e516 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.990739] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d47345-f44b-462d-84d0-edd264e10496 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.994871] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773938, 'name': ReconfigVM_Task, 'duration_secs': 0.395276} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1472.996129] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Reconfigured VM instance instance-0000002a to attach disk [datastore1] dcf8063b-56eb-439c-bee5-139a1e157714/dcf8063b-56eb-439c-bee5-139a1e157714.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1472.996772] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Created folder: Project (00a8f1cf54a6426b9980b16b17283b19) in parent group-v368199. [ 1472.996948] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Creating folder: Instances. Parent ref: group-v368319. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1472.997496] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce5b87d7-7212-4073-a33c-cd29c0cd1bfe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.999434] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d1744a7f-064a-419b-81aa-3cf4e846f6db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.013629] env[63371]: DEBUG nova.compute.provider_tree [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1473.017857] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1473.017857] env[63371]: value = "task-1773942" [ 1473.017857] env[63371]: _type = "Task" [ 1473.017857] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.021057] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Releasing lock "refresh_cache-be37eb1c-8582-4446-afd6-ae11a8cadf95" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1473.022684] env[63371]: DEBUG oslo_vmware.api [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773939, 'name': PowerOffVM_Task, 'duration_secs': 0.138875} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.024646] env[63371]: DEBUG nova.compute.manager [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1473.029334] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1473.029334] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1473.029563] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Created folder: Instances in parent group-v368319. [ 1473.029772] env[63371]: DEBUG oslo.service.loopingcall [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1473.030754] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080a4d45-b436-4ef2-a348-baa88a6ac05a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.033838] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b9c8bc70-8b92-4326-87f5-c481bf31d3b2 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.822s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1473.034763] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-708ab892-ff8b-4675-a89d-cbbb489c4e0a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.036071] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1473.037223] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-680ecf24-9d5c-4ca2-bfaf-161a365c3768 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.057279] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773942, 'name': Rename_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.064875] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1473.064875] env[63371]: value = "task-1773945" [ 1473.064875] env[63371]: _type = "Task" [ 1473.064875] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.073926] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773945, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.082939] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1473.083165] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1473.083584] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Deleting the datastore file [datastore1] 50d5eac1-0752-4089-948c-b04439df6f6c {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1473.083584] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8b45aea-7c02-47ed-bb54-358ad22ae7fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.090613] env[63371]: DEBUG oslo_vmware.api [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for the task: (returnval){ [ 1473.090613] env[63371]: value = "task-1773946" [ 1473.090613] env[63371]: _type = "Task" [ 1473.090613] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.099368] env[63371]: DEBUG oslo_vmware.api [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773946, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.128582] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1473.160683] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1473.161228] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1473.161578] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1473.161925] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1473.162210] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1473.162487] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1473.162842] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1473.163897] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1473.163897] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1473.163897] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1473.163897] env[63371]: DEBUG nova.virt.hardware [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1473.165044] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a08c7b-6dba-4df1-a704-e8b205098e3c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.175621] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a29f5fb-02da-426d-a52b-ec12dfd4b132 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.223914] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.518152] env[63371]: DEBUG nova.scheduler.client.report [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1473.530649] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773942, 'name': Rename_Task, 'duration_secs': 0.217738} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.530914] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1473.531175] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-709634b6-2204-4cd3-bb8b-32dcb535761d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.540940] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1473.540940] env[63371]: value = "task-1773947" [ 1473.540940] env[63371]: _type = "Task" [ 1473.540940] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.549907] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773947, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.577320] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773945, 'name': CreateVM_Task, 'duration_secs': 0.364019} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.577486] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1473.578162] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1473.578377] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1473.578696] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1473.579214] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-835cc217-7c19-45de-83c5-644d13c9bfdc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1473.583638] 
env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1473.583638] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c49104-2224-9478-fb5a-0d2e51712dca" [ 1473.583638] env[63371]: _type = "Task" [ 1473.583638] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1473.584392] env[63371]: DEBUG nova.network.neutron [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Updated VIF entry in instance network info cache for port 5f6d168b-1bd3-4bdd-9693-ee62c25e8666. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1473.584715] env[63371]: DEBUG nova.network.neutron [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Updating instance_info_cache with network_info: [{"id": "5f6d168b-1bd3-4bdd-9693-ee62c25e8666", "address": "fa:16:3e:79:87:94", "network": {"id": "c6b502a7-b302-4a03-a23e-3a32b446d367", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-615984944-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "00a8f1cf54a6426b9980b16b17283b19", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f6d168b-1b", "ovs_interfaceid": "5f6d168b-1bd3-4bdd-9693-ee62c25e8666", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1473.594972] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c49104-2224-9478-fb5a-0d2e51712dca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1473.603534] env[63371]: DEBUG oslo_vmware.api [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Task: {'id': task-1773946, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166964} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1473.603772] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1473.603946] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1473.604128] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1473.604291] env[63371]: INFO nova.compute.manager [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1473.604514] env[63371]: DEBUG oslo.service.loopingcall [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1473.604905] env[63371]: DEBUG nova.compute.manager [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1473.605046] env[63371]: DEBUG nova.network.neutron [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1473.638074] env[63371]: DEBUG nova.network.neutron [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1473.724620] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.026673] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.924s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.027232] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1474.030512] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.942s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.030746] env[63371]: DEBUG nova.objects.instance [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lazy-loading 'resources' on Instance uuid b48a8e83-e581-4886-833b-bbce155d40d9 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1474.053087] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773947, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.074162] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7b9864-c3de-4a71-a9a5-5ef254ff9917 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.081588] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Doing hard reboot of VM {{(pid=63371) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1474.082064] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-1053944c-3bab-45fc-a566-b84ecfa3bed6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.090370] env[63371]: DEBUG oslo_concurrency.lockutils [req-a80c9c98-ea12-4538-a3ef-cef1cedb3408 req-1ea26c59-2ae8-49cf-bca6-d3d0db14ceca service nova] Releasing lock "refresh_cache-aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.090796] env[63371]: DEBUG oslo_vmware.api [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1474.090796] env[63371]: value = "task-1773948" [ 1474.090796] env[63371]: _type = "Task" [ 1474.090796] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.097363] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c49104-2224-9478-fb5a-0d2e51712dca, 'name': SearchDatastore_Task, 'duration_secs': 0.04635} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.097985] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1474.098281] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1474.098571] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.098918] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.099084] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1474.105954] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-545e1581-5bb0-4440-bf7e-c26dcaed1778 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.111025] env[63371]: DEBUG oslo_vmware.api [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773948, 'name': ResetVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.118745] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1474.118993] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1474.119886] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c0d70c3-781f-4de0-8b0e-962244cdac1f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.127464] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1474.127464] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b3562d-8039-630f-5f37-46b3c2ce933f" [ 1474.127464] env[63371]: _type = "Task" [ 1474.127464] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.135134] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b3562d-8039-630f-5f37-46b3c2ce933f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.140557] env[63371]: DEBUG nova.network.neutron [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1474.225569] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.465295] env[63371]: DEBUG nova.network.neutron [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Successfully updated port: 96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1474.534773] env[63371]: DEBUG nova.compute.utils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1474.541889] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1474.541889] env[63371]: DEBUG nova.network.neutron [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1474.544822] env[63371]: DEBUG nova.compute.manager [req-5e9fdf2e-d50a-41ba-8073-27c5e69f5844 req-c8b38373-a455-43a6-bdda-c88285b6c9d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Received event network-vif-plugged-96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1474.544822] env[63371]: DEBUG oslo_concurrency.lockutils [req-5e9fdf2e-d50a-41ba-8073-27c5e69f5844 req-c8b38373-a455-43a6-bdda-c88285b6c9d1 service nova] Acquiring lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.544822] env[63371]: DEBUG oslo_concurrency.lockutils [req-5e9fdf2e-d50a-41ba-8073-27c5e69f5844 req-c8b38373-a455-43a6-bdda-c88285b6c9d1 service nova] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.544822] env[63371]: DEBUG oslo_concurrency.lockutils [req-5e9fdf2e-d50a-41ba-8073-27c5e69f5844 req-c8b38373-a455-43a6-bdda-c88285b6c9d1 service nova] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1474.545043] env[63371]: DEBUG nova.compute.manager [req-5e9fdf2e-d50a-41ba-8073-27c5e69f5844 req-c8b38373-a455-43a6-bdda-c88285b6c9d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] No waiting events found dispatching network-vif-plugged-96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1474.545043] env[63371]: WARNING nova.compute.manager [req-5e9fdf2e-d50a-41ba-8073-27c5e69f5844 req-c8b38373-a455-43a6-bdda-c88285b6c9d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Received unexpected event network-vif-plugged-96760ebc-7de4-48e4-94ac-f0a3a2eab943 for instance with vm_state building and task_state spawning. [ 1474.560023] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773947, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.608507] env[63371]: DEBUG oslo_vmware.api [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773948, 'name': ResetVM_Task, 'duration_secs': 0.105612} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.608809] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Did hard reboot of VM {{(pid=63371) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1474.609012] env[63371]: DEBUG nova.compute.manager [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1474.610272] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a2e3f78-8be5-4901-9e1b-9be07d6294eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.641348] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b3562d-8039-630f-5f37-46b3c2ce933f, 'name': SearchDatastore_Task, 'duration_secs': 0.022302} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.645654] env[63371]: DEBUG nova.policy [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38c65e6dd9e4468fb1a0235bac086151', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4ca8a73414142d497ebd3d3f043d9ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1474.647900] env[63371]: INFO nova.compute.manager [-] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Took 1.04 seconds to deallocate network for instance. [ 1474.648630] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ced9b72-9928-4036-b04e-7e0e53afbe74 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.660025] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1474.660025] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523b4ac6-6ac7-4899-db6d-0704172cdeab" [ 1474.660025] env[63371]: _type = "Task" [ 1474.660025] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.677421] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523b4ac6-6ac7-4899-db6d-0704172cdeab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.730253] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.853439] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1474.853672] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1474.971193] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1474.971465] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1474.971536] env[63371]: DEBUG nova.network.neutron [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1475.046597] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1475.060029] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773947, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.084411] env[63371]: DEBUG nova.network.neutron [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Successfully created port: 9a55b6f2-f084-4989-9b8c-434c1a1deab6 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1475.127802] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76569887-1854-4b5c-9293-f5781d2e4ff5 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.134s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.150264] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59398f8d-35b4-4403-93f7-40b12e15e51d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.158397] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.159665] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f84814-2073-43ee-9cf9-9484ab7b45fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.171984] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523b4ac6-6ac7-4899-db6d-0704172cdeab, 'name': SearchDatastore_Task, 'duration_secs': 0.021516} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.198322] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1475.198596] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf/aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1475.200525] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6c875a56-47e3-4df5-aa0a-0ff8b6d19e08 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.203082] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc134cd1-de44-49a3-b915-c2c78b1e28b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.216372] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ba841b-b499-46a8-8b9c-64b752e7ef2e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.221454] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1475.221454] env[63371]: value = "task-1773949" [ 1475.221454] env[63371]: _type = "Task" [ 1475.221454] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.236393] env[63371]: DEBUG nova.compute.provider_tree [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1475.243228] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.246192] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773949, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.509886] env[63371]: DEBUG nova.network.neutron [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1475.573818] env[63371]: DEBUG oslo_vmware.api [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773947, 'name': PowerOnVM_Task, 'duration_secs': 1.94103} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.574157] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1475.574765] env[63371]: INFO nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Took 9.98 seconds to spawn the instance on the hypervisor. [ 1475.574765] env[63371]: DEBUG nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1475.575939] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73d00593-1fca-433e-af0a-6f4d4f755b2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.710268] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1475.710268] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.710268] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "be37eb1c-8582-4446-afd6-ae11a8cadf95-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1475.710493] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1475.710825] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1475.715660] env[63371]: INFO nova.compute.manager [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Terminating instance [ 1475.718083] env[63371]: DEBUG nova.compute.manager [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1475.718297] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1475.719640] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df179a4a-c9e5-4ccb-89fc-7ef9880f88da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.739238] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773949, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.749241] env[63371]: DEBUG nova.scheduler.client.report [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1475.753513] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.753816] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1475.755324] env[63371]: DEBUG nova.network.neutron [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [{"id": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "address": "fa:16:3e:cb:30:e4", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96760ebc-7d", "ovs_interfaceid": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1475.756636] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f2d9569-1cc0-4e27-be57-37aeaaa0fef2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1475.764719] env[63371]: DEBUG oslo_vmware.api [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 
tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1475.764719] env[63371]: value = "task-1773950" [ 1475.764719] env[63371]: _type = "Task" [ 1475.764719] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1475.778167] env[63371]: DEBUG oslo_vmware.api [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773950, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.066924] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1476.094609] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1476.094899] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1476.095010] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1476.095405] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1476.096493] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1476.096493] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 
tempest-ServersAdminTestJSON-1763458454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1476.096493] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1476.096493] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1476.096493] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1476.096493] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1476.096822] env[63371]: DEBUG nova.virt.hardware [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1476.097399] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea740d8f-ab91-420f-ae37-c2a84676d251 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.102719] env[63371]: INFO nova.compute.manager [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Took 44.55 seconds to build instance. [ 1476.109486] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a137bdd-6cfb-4cb2-8749-ae4141e919e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.236408] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773949, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678029} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.239338] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf/aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1476.239585] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1476.239889] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.240083] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7f781568-9394-4a2c-ac85-b0840d525830 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.247174] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1476.247174] env[63371]: value = "task-1773951" [ 1476.247174] env[63371]: _type = "Task" [ 1476.247174] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.256479] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773951, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.257296] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.227s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.259451] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.567s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1476.261025] env[63371]: INFO nova.compute.claims [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1476.264018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1476.264185] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance network_info: |[{"id": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "address": "fa:16:3e:cb:30:e4", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96760ebc-7d", "ovs_interfaceid": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1476.264778] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:30:e4', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca99f7a1-6365-4d3c-af16-1b1c1288091e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96760ebc-7de4-48e4-94ac-f0a3a2eab943', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1476.275207] env[63371]: DEBUG oslo.service.loopingcall [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.275207] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1476.277745] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-443f7662-2562-42fa-8ffa-9682c24d6713 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.296709] env[63371]: INFO nova.scheduler.client.report [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Deleted allocations for instance b48a8e83-e581-4886-833b-bbce155d40d9 [ 1476.302922] env[63371]: DEBUG oslo_vmware.api [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773950, 'name': PowerOffVM_Task, 'duration_secs': 0.391757} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.304485] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1476.304659] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1476.304912] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1476.304912] env[63371]: value = "task-1773952" [ 1476.304912] env[63371]: _type = "Task" [ 1476.304912] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.305665] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea692d67-af80-48ad-ada5-0336f993d10a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.316571] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773952, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.421495] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1476.421789] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1476.421996] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Deleting the datastore file [datastore1] be37eb1c-8582-4446-afd6-ae11a8cadf95 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1476.422362] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9719a79f-399f-40e2-90ef-58bf71a84bc5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.431037] env[63371]: DEBUG oslo_vmware.api [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for the task: (returnval){ [ 1476.431037] env[63371]: value = "task-1773954" [ 1476.431037] env[63371]: _type = "Task" [ 1476.431037] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.440035] env[63371]: DEBUG oslo_vmware.api [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773954, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.606771] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4a6b5ce8-8c74-4026-aacf-93c7e0ece521 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "dcf8063b-56eb-439c-bee5-139a1e157714" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 106.568s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.723141] env[63371]: DEBUG nova.compute.manager [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Received event network-changed-96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1476.723348] env[63371]: DEBUG nova.compute.manager [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Refreshing instance network info cache due to event network-changed-96760ebc-7de4-48e4-94ac-f0a3a2eab943. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1476.723562] env[63371]: DEBUG oslo_concurrency.lockutils [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] Acquiring lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.723702] env[63371]: DEBUG oslo_concurrency.lockutils [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] Acquired lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.725658] env[63371]: DEBUG nova.network.neutron [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Refreshing network info cache for port 96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1476.743415] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.757800] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773951, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121976} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.758513] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1476.762019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8bbd581-eee3-4438-a5ad-302d5815c45e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.790427] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf/aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1476.795316] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff3320c8-6208-4a3b-85e0-cf8b6ce16e2d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.819669] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8523466a-da73-4fe9-9a0e-db1c0b8af0f1 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "b48a8e83-e581-4886-833b-bbce155d40d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.314s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1476.829766] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773952, 'name': CreateVM_Task, 'duration_secs': 0.42088} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.830968] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1476.831314] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1476.831314] env[63371]: value = "task-1773955" [ 1476.831314] env[63371]: _type = "Task" [ 1476.831314] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.831970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1476.832222] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.832478] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1476.832892] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21f9b217-8e03-411e-b48b-d97d3e286dc8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.843224] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1476.843224] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae055f-86be-3a11-1b70-d41466fde8bb" [ 1476.843224] env[63371]: _type = "Task" [ 1476.843224] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.847196] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773955, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.856996] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae055f-86be-3a11-1b70-d41466fde8bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.916344] env[63371]: DEBUG nova.network.neutron [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Successfully updated port: 9a55b6f2-f084-4989-9b8c-434c1a1deab6 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1476.947833] env[63371]: DEBUG oslo_vmware.api [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Task: {'id': task-1773954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.460083} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.948348] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1476.948712] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1476.948712] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1476.948865] env[63371]: INFO nova.compute.manager [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1476.949148] env[63371]: DEBUG oslo.service.loopingcall [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1476.949338] env[63371]: DEBUG nova.compute.manager [-] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1476.949443] env[63371]: DEBUG nova.network.neutron [-] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1477.109367] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1477.249551] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.347786] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773955, 'name': ReconfigVM_Task, 'duration_secs': 0.284878} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.355113] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Reconfigured VM instance instance-0000002b to attach disk [datastore1] aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf/aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1477.356275] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e66b5194-6adf-49e5-b6e8-ab9c6ce5ba5b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.364180] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae055f-86be-3a11-1b70-d41466fde8bb, 'name': SearchDatastore_Task, 'duration_secs': 0.020969} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.365513] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1477.365761] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1477.366018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.366180] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.366365] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1477.366682] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1477.366682] env[63371]: value = "task-1773956" [ 1477.366682] env[63371]: _type = "Task" [ 1477.366682] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.366865] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90dc3e56-2934-4124-a9ab-f423d7dc254f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.381080] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773956, 'name': Rename_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.382140] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1477.382326] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1477.383088] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-132d0b24-e6c6-4e88-beb0-b0b308b2ec23 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.388613] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1477.388613] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d2643e-2ada-c22c-2ece-852a67a7a356" [ 1477.388613] env[63371]: _type = "Task" [ 1477.388613] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.399514] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d2643e-2ada-c22c-2ece-852a67a7a356, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.421216] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3994fb0a-c787-4436-96c6-6274a7861b94 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.425713] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "refresh_cache-e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1477.425713] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "refresh_cache-e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.425713] env[63371]: DEBUG nova.network.neutron [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1477.430598] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f83ba5-3af2-40c7-9050-699424022ba4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.462316] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "64fc862c-a755-4cac-997b-7a8328638269" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.462425] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "64fc862c-a755-4cac-997b-7a8328638269" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.462574] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "64fc862c-a755-4cac-997b-7a8328638269-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.462760] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "64fc862c-a755-4cac-997b-7a8328638269-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.462924] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "64fc862c-a755-4cac-997b-7a8328638269-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.467569] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6ffcaa-9541-4603-a04b-d2580b408bc4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.470563] env[63371]: INFO nova.compute.manager [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Terminating instance [ 1477.472727] env[63371]: DEBUG nova.compute.manager [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1477.472920] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1477.474442] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6f3b70-373a-4ea7-a678-844c06f51345 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.483129] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54cc31cc-808d-4167-bc36-4d461f19be92 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.489938] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1477.490351] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42347a9c-e88c-4537-8147-65cb950b9cb5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.500338] env[63371]: DEBUG nova.compute.provider_tree [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1477.507310] env[63371]: DEBUG oslo_vmware.api [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] 
Waiting for the task: (returnval){ [ 1477.507310] env[63371]: value = "task-1773957" [ 1477.507310] env[63371]: _type = "Task" [ 1477.507310] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.517510] env[63371]: DEBUG oslo_vmware.api [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773957, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.555451] env[63371]: DEBUG nova.network.neutron [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updated VIF entry in instance network info cache for port 96760ebc-7de4-48e4-94ac-f0a3a2eab943. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1477.555722] env[63371]: DEBUG nova.network.neutron [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [{"id": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "address": "fa:16:3e:cb:30:e4", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96760ebc-7d", "ovs_interfaceid": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.628077] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.698489] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "dcf8063b-56eb-439c-bee5-139a1e157714" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.698669] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 
tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "dcf8063b-56eb-439c-bee5-139a1e157714" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.698888] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "dcf8063b-56eb-439c-bee5-139a1e157714-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1477.699092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "dcf8063b-56eb-439c-bee5-139a1e157714-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1477.699266] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "dcf8063b-56eb-439c-bee5-139a1e157714-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1477.701418] env[63371]: INFO nova.compute.manager [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Terminating instance [ 1477.703197] env[63371]: DEBUG nova.compute.manager [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1477.703364] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1477.704236] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8585fa61-0bc2-40b3-8c78-426e642e552b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.712558] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1477.712776] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1fe4fea-cdef-4105-96fb-1667added149 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.719186] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1477.719186] env[63371]: value = "task-1773958" [ 1477.719186] env[63371]: _type = "Task" [ 1477.719186] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.727168] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773958, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.736333] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.750965] env[63371]: DEBUG nova.network.neutron [-] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1477.879674] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773956, 'name': Rename_Task, 'duration_secs': 0.155122} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.880584] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1477.880584] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87e25313-79df-44d6-b702-2ded5962c99b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.887302] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1477.887302] env[63371]: value = "task-1773959" [ 1477.887302] env[63371]: _type = "Task" [ 1477.887302] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.899110] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773959, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.903493] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d2643e-2ada-c22c-2ece-852a67a7a356, 'name': SearchDatastore_Task, 'duration_secs': 0.024302} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.904393] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5eddcfce-a637-4cae-9c6f-b77d171d64c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.910352] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1477.910352] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d4a26e-4660-2ecb-5e05-24523fcfe0b3" [ 1477.910352] env[63371]: _type = "Task" [ 1477.910352] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.919523] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d4a26e-4660-2ecb-5e05-24523fcfe0b3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.962638] env[63371]: DEBUG nova.network.neutron [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1478.004076] env[63371]: DEBUG nova.scheduler.client.report [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1478.018872] env[63371]: DEBUG oslo_vmware.api [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773957, 'name': PowerOffVM_Task, 'duration_secs': 0.336813} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.019858] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1478.020052] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1478.020308] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7c03b6ae-ca99-447b-8536-7739a3caa1b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.058431] env[63371]: DEBUG oslo_concurrency.lockutils [req-c216926a-3829-4799-b2e9-a35a70b276a6 req-17489b8b-7302-48f1-bfdf-0a016575114f service nova] Releasing lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.065501] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "33cf00ea-3195-41cf-9b7a-a8e64496a122" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.065831] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 
tempest-ServerRescueTestJSON-1718412976-project-member] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.066339] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "33cf00ea-3195-41cf-9b7a-a8e64496a122-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.066611] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.066790] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.068979] env[63371]: INFO nova.compute.manager [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Terminating instance [ 1478.074274] env[63371]: DEBUG nova.compute.manager [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1478.074472] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1478.075656] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10b1eed-56f7-4300-af1f-2bc2d395e341 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.083965] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1478.084252] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-28a96417-ca08-49e6-8436-13a854336fb4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.094646] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1478.094646] env[63371]: value = "task-1773961" [ 1478.094646] env[63371]: _type = "Task" [ 1478.094646] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.095608] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1478.095807] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1478.095986] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleting the datastore file [datastore1] 64fc862c-a755-4cac-997b-7a8328638269 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1478.099221] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-008bf87c-f71f-4de8-b2fb-172b0a077c84 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.106330] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773961, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.107687] env[63371]: DEBUG oslo_vmware.api [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1478.107687] env[63371]: value = "task-1773962" [ 1478.107687] env[63371]: _type = "Task" [ 1478.107687] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.115343] env[63371]: DEBUG oslo_vmware.api [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773962, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.176858] env[63371]: DEBUG nova.network.neutron [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Updating instance_info_cache with network_info: [{"id": "9a55b6f2-f084-4989-9b8c-434c1a1deab6", "address": "fa:16:3e:eb:6a:af", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a55b6f2-f0", "ovs_interfaceid": "9a55b6f2-f084-4989-9b8c-434c1a1deab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1478.230242] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773958, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.239498] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.254026] env[63371]: INFO nova.compute.manager [-] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Took 1.30 seconds to deallocate network for instance. 
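[annotation] The repeated "_poll_task ... progress is N%" entries above (for example task-1773959 PowerOnVM_Task moving from 0% to 94% before "completed successfully" with a duration_secs value) all come from the same wait loop: the driver submits a vCenter task and then polls its state at a fixed interval until it reports success or error. The following is only a minimal schematic of that polling pattern, with made-up names (TaskInfo, poll_info), not the actual oslo_vmware wait_for_task implementation.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    key: str
    name: str
    state: str        # "running", "success", or "error"
    progress: int
    result: object = None
    error: str = ""

def wait_for_task(poll_info, poll_interval=0.5):
    """Poll a task until it finishes; poll_info() returns the current TaskInfo."""
    start = time.time()
    while True:
        info = poll_info()
        if info.state == "running":
            # Corresponds to the DEBUG "_poll_task ... progress is N%" lines.
            print(f"Task {info.key} ({info.name}) progress is {info.progress}%")
        elif info.state == "success":
            # Corresponds to "Task ... completed successfully" with duration_secs.
            print(f"Task {info.key} ({info.name}) completed successfully "
                  f"in {time.time() - start:.3f}s")
            return info.result
        else:
            raise RuntimeError(f"Task {info.key} failed: {info.error}")
        time.sleep(poll_interval)

# Usage with a simulated PowerOnVM_Task that finishes on the third poll.
states = iter([
    TaskInfo("task-1", "PowerOnVM_Task", "running", 0),
    TaskInfo("task-1", "PowerOnVM_Task", "running", 94),
    TaskInfo("task-1", "PowerOnVM_Task", "success", 100, result="ok"),
])
wait_for_task(lambda: next(states), poll_interval=0.01)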
[ 1478.398653] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773959, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.422252] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d4a26e-4660-2ecb-5e05-24523fcfe0b3, 'name': SearchDatastore_Task, 'duration_secs': 0.011598} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.422527] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.422825] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7e66011a-4fed-471f-82ea-e1016f92ad39/7e66011a-4fed-471f-82ea-e1016f92ad39.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1478.423108] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5050eabc-1823-4e09-8307-d5c71953212e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.430029] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1478.430029] env[63371]: value = "task-1773963" [ 1478.430029] env[63371]: _type = "Task" [ 1478.430029] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.438826] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773963, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.514189] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.253s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.514189] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1478.516371] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.496s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.516644] env[63371]: DEBUG nova.objects.instance [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lazy-loading 'resources' on Instance uuid cfbd0c7c-243e-497a-acb1-ab9323c23574 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1478.606346] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773961, 'name': PowerOffVM_Task, 'duration_secs': 0.21064} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.606485] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1478.606621] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1478.606900] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-61af4894-ff87-487b-bb8c-f2a7721cd29e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.616469] env[63371]: DEBUG oslo_vmware.api [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773962, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171844} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.616708] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1478.616910] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1478.617105] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1478.617313] env[63371]: INFO nova.compute.manager [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1478.617570] env[63371]: DEBUG oslo.service.loopingcall [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.617766] env[63371]: DEBUG nova.compute.manager [-] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1478.617880] env[63371]: DEBUG nova.network.neutron [-] [instance: 64fc862c-a755-4cac-997b-7a8328638269] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1478.680121] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "refresh_cache-e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.680665] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Instance network_info: |[{"id": "9a55b6f2-f084-4989-9b8c-434c1a1deab6", "address": "fa:16:3e:eb:6a:af", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a55b6f2-f0", "ovs_interfaceid": "9a55b6f2-f084-4989-9b8c-434c1a1deab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1478.681323] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:6a:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a55b6f2-f084-4989-9b8c-434c1a1deab6', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1478.689524] env[63371]: DEBUG oslo.service.loopingcall [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1478.690172] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1478.690446] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e4f30ba-d30c-4cdb-8ff1-04b92c011d75 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.715710] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1478.716018] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1478.716141] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Deleting the datastore file [datastore1] 33cf00ea-3195-41cf-9b7a-a8e64496a122 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1478.716404] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-33a9c3ff-9819-4ecb-93f9-65be03daa24a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.719922] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1478.719922] env[63371]: value = "task-1773965" [ 1478.719922] env[63371]: _type = "Task" [ 1478.719922] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.728095] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for the task: (returnval){ [ 1478.728095] env[63371]: value = "task-1773966" [ 1478.728095] env[63371]: _type = "Task" [ 1478.728095] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.738327] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773958, 'name': PowerOffVM_Task, 'duration_secs': 0.866059} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.738528] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773965, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.742203] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1478.742393] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1478.742681] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aafc2e5a-ca16-4b3c-97ff-8341001f41db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.750319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.750623] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.751723] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.751723] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.751723] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.752913] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 
tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773966, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.756783] env[63371]: DEBUG oslo_vmware.api [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773940, 'name': ReconfigVM_Task, 'duration_secs': 5.878851} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.757343] env[63371]: INFO nova.compute.manager [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Terminating instance [ 1478.759084] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1478.759308] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Reconfigured VM to detach interface {{(pid=63371) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1478.762446] env[63371]: DEBUG nova.compute.manager [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1478.762653] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1478.763725] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.764448] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5dc76ec-a1c5-41a0-ab32-963edc9177b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.774495] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1478.774807] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cee114bf-6602-419f-ac00-6d158bd3a8c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.783730] env[63371]: DEBUG oslo_vmware.api [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1478.783730] env[63371]: value = "task-1773968" [ 1478.783730] env[63371]: _type = "Task" [ 1478.783730] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.799137] env[63371]: DEBUG oslo_vmware.api [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773968, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.842786] env[63371]: DEBUG nova.compute.manager [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Received event network-vif-plugged-9a55b6f2-f084-4989-9b8c-434c1a1deab6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1478.843206] env[63371]: DEBUG oslo_concurrency.lockutils [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] Acquiring lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1478.843476] env[63371]: DEBUG oslo_concurrency.lockutils [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] Lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1478.843684] env[63371]: DEBUG oslo_concurrency.lockutils [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] Lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1478.843948] env[63371]: DEBUG nova.compute.manager [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] No waiting events found dispatching network-vif-plugged-9a55b6f2-f084-4989-9b8c-434c1a1deab6 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1478.844024] env[63371]: WARNING nova.compute.manager [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Received unexpected event network-vif-plugged-9a55b6f2-f084-4989-9b8c-434c1a1deab6 for instance with vm_state building and task_state spawning. [ 1478.844226] env[63371]: DEBUG nova.compute.manager [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Received event network-changed-9a55b6f2-f084-4989-9b8c-434c1a1deab6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1478.844389] env[63371]: DEBUG nova.compute.manager [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Refreshing instance network info cache due to event network-changed-9a55b6f2-f084-4989-9b8c-434c1a1deab6. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1478.844577] env[63371]: DEBUG oslo_concurrency.lockutils [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] Acquiring lock "refresh_cache-e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1478.844711] env[63371]: DEBUG oslo_concurrency.lockutils [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] Acquired lock "refresh_cache-e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1478.844865] env[63371]: DEBUG nova.network.neutron [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Refreshing network info cache for port 9a55b6f2-f084-4989-9b8c-434c1a1deab6 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1478.858143] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1478.858464] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1478.858680] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleting the datastore file [datastore1] dcf8063b-56eb-439c-bee5-139a1e157714 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1478.859245] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ed6ddaee-fa3e-459a-8fe4-8f1d382ad30f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.867049] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for the task: (returnval){ [ 1478.867049] env[63371]: value = "task-1773969" [ 1478.867049] env[63371]: _type = "Task" [ 1478.867049] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.876351] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773969, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.899803] env[63371]: DEBUG oslo_vmware.api [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773959, 'name': PowerOnVM_Task, 'duration_secs': 0.565865} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.900121] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1478.900478] env[63371]: INFO nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1478.900704] env[63371]: DEBUG nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1478.901815] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7669ebc4-79bc-4f2a-b825-922d09a60a52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.941253] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773963, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.020820] env[63371]: DEBUG nova.compute.utils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1479.025769] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1479.026786] env[63371]: DEBUG nova.network.neutron [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1479.031644] env[63371]: DEBUG nova.compute.manager [req-cc3776d9-d064-4d0c-8c70-af70b25f37c1 req-0071475f-7c0f-4fca-85de-45bda1de0782 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Received event network-vif-deleted-59bb4dc3-13e6-4180-bec1-3a41954f8d62 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1479.031863] env[63371]: INFO nova.compute.manager [req-cc3776d9-d064-4d0c-8c70-af70b25f37c1 req-0071475f-7c0f-4fca-85de-45bda1de0782 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Neutron deleted interface 59bb4dc3-13e6-4180-bec1-3a41954f8d62; detaching it from the instance and deleting it from the info cache [ 1479.032566] env[63371]: DEBUG nova.network.neutron [req-cc3776d9-d064-4d0c-8c70-af70b25f37c1 req-0071475f-7c0f-4fca-85de-45bda1de0782 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.094300] env[63371]: DEBUG nova.policy [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1b2f698ebd747d6a84ac3f3e05e97b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a5b81b233f640b186d9798ff57a4945', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1479.236360] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773965, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.242393] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773966, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.296904] env[63371]: DEBUG oslo_vmware.api [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773968, 'name': PowerOffVM_Task, 'duration_secs': 0.353319} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.299837] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1479.300066] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1479.301522] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-24832d49-aa50-4411-8db7-75b714c55e85 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.376152] env[63371]: DEBUG oslo_vmware.api [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Task: {'id': task-1773969, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.349644} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.376432] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1479.376617] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1479.376788] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1479.376957] env[63371]: INFO nova.compute.manager [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1479.377208] env[63371]: DEBUG oslo.service.loopingcall [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1479.377401] env[63371]: DEBUG nova.compute.manager [-] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1479.377493] env[63371]: DEBUG nova.network.neutron [-] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1479.389014] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1479.390212] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1479.390524] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Deleting the datastore file [datastore1] dc6ef0a7-1744-4b90-b385-913cb796f7d0 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1479.392620] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4efece65-753a-4526-93e6-4ad77b95228d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.396448] env[63371]: DEBUG oslo_vmware.api [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1479.396448] env[63371]: value = "task-1773971" [ 1479.396448] env[63371]: _type = "Task" [ 1479.396448] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.409965] env[63371]: DEBUG oslo_vmware.api [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773971, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.422584] env[63371]: INFO nova.compute.manager [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Took 45.13 seconds to build instance. [ 1479.445125] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773963, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.67037} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.445394] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7e66011a-4fed-471f-82ea-e1016f92ad39/7e66011a-4fed-471f-82ea-e1016f92ad39.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1479.445606] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1479.445835] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29f0c5d5-9a9c-40a5-9489-5f409e0f47d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.455793] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1479.455793] env[63371]: value = "task-1773972" [ 1479.455793] env[63371]: _type = "Task" [ 1479.455793] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.464824] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773972, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.503167] env[63371]: DEBUG nova.network.neutron [-] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.530618] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1479.542740] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5271884-143e-4de2-96bb-7dc0dd97663c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.552160] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf59b941-ba26-499d-b675-ee2ce972f746 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.593354] env[63371]: DEBUG nova.compute.manager [req-cc3776d9-d064-4d0c-8c70-af70b25f37c1 req-0071475f-7c0f-4fca-85de-45bda1de0782 service nova] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Detach interface failed, port_id=59bb4dc3-13e6-4180-bec1-3a41954f8d62, reason: Instance 64fc862c-a755-4cac-997b-7a8328638269 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1479.594540] env[63371]: DEBUG nova.network.neutron [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Successfully created port: 3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1479.650416] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb7243d-f5ff-4e92-bc1a-7a390c6329a2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.666048] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc58988-67a0-4412-af1d-91fdbeb2a111 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.704612] env[63371]: DEBUG nova.network.neutron [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Updated VIF entry in instance network info cache for port 9a55b6f2-f084-4989-9b8c-434c1a1deab6. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1479.705857] env[63371]: DEBUG nova.network.neutron [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Updating instance_info_cache with network_info: [{"id": "9a55b6f2-f084-4989-9b8c-434c1a1deab6", "address": "fa:16:3e:eb:6a:af", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a55b6f2-f0", "ovs_interfaceid": "9a55b6f2-f084-4989-9b8c-434c1a1deab6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1479.708084] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b452ba34-eb7a-41a8-bbc6-19a14fffe7bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.716773] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604589ce-3d56-4980-bd2d-6aebee3d3f2c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.735335] env[63371]: DEBUG nova.compute.provider_tree [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1479.742519] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773965, 'name': CreateVM_Task, 'duration_secs': 0.527978} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.742881] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1479.745208] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1479.745208] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1479.745208] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1479.745540] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dea2026c-4743-4a29-bf19-565cac8b7c61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.752548] env[63371]: DEBUG oslo_vmware.api [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Task: {'id': task-1773966, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.575867} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.754083] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1479.754083] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1479.754083] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1479.754083] env[63371]: INFO nova.compute.manager [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1479.754261] env[63371]: DEBUG oslo.service.loopingcall [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1479.754405] env[63371]: DEBUG nova.compute.manager [-] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1479.754664] env[63371]: DEBUG nova.network.neutron [-] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1479.757631] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1479.757631] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528c3742-fc88-c1dd-8a44-e80eae1694da" [ 1479.757631] env[63371]: _type = "Task" [ 1479.757631] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.766644] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528c3742-fc88-c1dd-8a44-e80eae1694da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.777219] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.777538] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.777756] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1479.777930] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1479.778596] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.780113] env[63371]: INFO nova.compute.manager [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Terminating instance [ 1479.781899] env[63371]: DEBUG nova.compute.manager [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1479.782048] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1479.782846] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9438631-eb00-4006-a2cd-ac1e2517f6a1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.795026] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1479.795026] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a3fef41-34fd-4317-b63c-268cec7f3ea6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.799028] env[63371]: DEBUG oslo_vmware.api [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1479.799028] env[63371]: value = "task-1773973" [ 1479.799028] env[63371]: _type = "Task" [ 1479.799028] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.806664] env[63371]: DEBUG oslo_vmware.api [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773973, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.907620] env[63371]: DEBUG oslo_vmware.api [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1773971, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.314217} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.908034] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1479.908247] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1479.908468] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1479.908674] env[63371]: INFO nova.compute.manager [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1479.908918] env[63371]: DEBUG oslo.service.loopingcall [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1479.909119] env[63371]: DEBUG nova.compute.manager [-] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1479.909211] env[63371]: DEBUG nova.network.neutron [-] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1479.929763] env[63371]: DEBUG oslo_concurrency.lockutils [None req-60361c48-8906-4604-8d9b-b831f2d84812 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 109.159s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1479.969156] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773972, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.214906} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.972128] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1479.972976] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f92ab6f-2e0b-4be9-9663-8c474b5383b7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.998028] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 7e66011a-4fed-471f-82ea-e1016f92ad39/7e66011a-4fed-471f-82ea-e1016f92ad39.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1479.998028] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d9b084d-25d6-4c8c-941a-2bde20ccd278 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.012610] env[63371]: INFO nova.compute.manager [-] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Took 1.39 seconds to deallocate network for instance. [ 1480.024247] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1480.024247] env[63371]: value = "task-1773974" [ 1480.024247] env[63371]: _type = "Task" [ 1480.024247] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.034920] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773974, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.119215] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.119350] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.119531] env[63371]: DEBUG nova.network.neutron [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1480.212240] env[63371]: DEBUG oslo_concurrency.lockutils [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] Releasing lock "refresh_cache-e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.212537] env[63371]: DEBUG nova.compute.manager [req-7dc3acc0-83ef-469c-8359-b4b957e9f653 req-0e190c28-6a09-4e09-bf64-7d906770c241 service nova] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Received event network-vif-deleted-78d7a9b4-2512-4b55-95e3-50aa146658fa {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1480.244014] env[63371]: DEBUG nova.scheduler.client.report [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1480.270265] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528c3742-fc88-c1dd-8a44-e80eae1694da, 'name': SearchDatastore_Task, 'duration_secs': 0.00936} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.270545] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1480.271216] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1480.271216] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1480.271512] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1480.271512] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1480.272480] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb624a58-24d9-44ee-9037-5caef072d544 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.280577] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1480.280781] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1480.281611] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-062edc76-4686-4f8d-a461-a33cd31f36cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.290484] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1480.290484] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a6c0a8-766c-d174-1332-0833dcbdf2db" [ 1480.290484] env[63371]: _type = "Task" [ 1480.290484] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.300828] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a6c0a8-766c-d174-1332-0833dcbdf2db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.308476] env[63371]: DEBUG oslo_vmware.api [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773973, 'name': PowerOffVM_Task, 'duration_secs': 0.28681} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.308727] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1480.308892] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1480.309151] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7f1e4e5-76f7-4597-aa7e-5aa47b6193cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.340023] env[63371]: DEBUG nova.network.neutron [-] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.389148] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1480.389311] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] 
[instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1480.389498] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleting the datastore file [datastore1] 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1480.390199] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ad5501e3-33b5-4447-b1e5-cab58a2a21f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.396429] env[63371]: DEBUG oslo_vmware.api [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1480.396429] env[63371]: value = "task-1773976" [ 1480.396429] env[63371]: _type = "Task" [ 1480.396429] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.404665] env[63371]: DEBUG oslo_vmware.api [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773976, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.432852] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1480.521854] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.529511] env[63371]: DEBUG nova.network.neutron [-] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.534689] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773974, 'name': ReconfigVM_Task, 'duration_secs': 0.29383} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.534784] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 7e66011a-4fed-471f-82ea-e1016f92ad39/7e66011a-4fed-471f-82ea-e1016f92ad39.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1480.535714] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2775610b-b2be-4d5d-8570-83102540dd16 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.543861] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1480.545866] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1480.545866] env[63371]: value = "task-1773977" [ 1480.545866] env[63371]: _type = "Task" [ 1480.545866] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.554496] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773977, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.564805] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1480.565075] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1480.565262] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1480.565449] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1480.565595] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1480.565763] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1480.565972] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1480.566144] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1480.566325] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 
tempest-ImagesTestJSON-1893767495-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1480.566515] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1480.566718] env[63371]: DEBUG nova.virt.hardware [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1480.567705] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4e50e3-bd25-491f-a90c-e47fc07abf42 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.577021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de87b2a2-b3a4-4ba5-8f77-0c094a153054 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.750490] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.234s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1480.752932] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.398s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1480.754722] env[63371]: INFO nova.compute.claims [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1480.780783] env[63371]: DEBUG nova.network.neutron [-] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.800517] env[63371]: INFO nova.scheduler.client.report [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Deleted allocations for instance cfbd0c7c-243e-497a-acb1-ab9323c23574 [ 1480.812729] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a6c0a8-766c-d174-1332-0833dcbdf2db, 'name': SearchDatastore_Task, 'duration_secs': 0.010629} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.815941] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f96ad16-8a49-4bd8-9b07-3f215ffe2106 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.824926] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1480.824926] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521caad6-43c7-52b0-37cd-b72a6c01cefe" [ 1480.824926] env[63371]: _type = "Task" [ 1480.824926] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.833850] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521caad6-43c7-52b0-37cd-b72a6c01cefe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.842711] env[63371]: INFO nova.compute.manager [-] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Took 1.47 seconds to deallocate network for instance. [ 1480.908892] env[63371]: DEBUG oslo_vmware.api [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1773976, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.39831} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.910440] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1480.910440] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1480.910440] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1480.910440] env[63371]: INFO nova.compute.manager [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Took 1.13 seconds to destroy the instance on the hypervisor. 
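The task-1773972 through task-1773979 records above all follow the same shape: a vCenter task is submitted, polled (the "progress is N%" lines logged from api.py:434), and finally reported with a "completed successfully" line carrying duration_secs (api.py:444). A minimal plain-Python sketch of that polling loop follows; wait_for_task, get_task_info and TaskFailed here are illustrative stand-ins for the pattern the log traces, not oslo_vmware's actual API.

import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state."""


def wait_for_task(get_task_info, poll_interval=0.5):
    # get_task_info is any callable returning an object with .state,
    # .progress, .error and .result, mirroring the fields that the
    # DeleteDatastoreFile_Task / SearchDatastore_Task records report.
    start = time.monotonic()
    while True:
        info = get_task_info()
        if info.state == "success":
            # Corresponds to the "completed successfully" lines with duration_secs.
            return info.result, round(time.monotonic() - start, 6)
        if info.state == "error":
            raise TaskFailed(info.error)
        # Corresponds to the intermediate "progress is N%" lines between polls.
        print("progress is %s%%" % info.progress)
        time.sleep(poll_interval)
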
[ 1480.910440] env[63371]: DEBUG oslo.service.loopingcall [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1480.910440] env[63371]: DEBUG nova.compute.manager [-] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1480.910791] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1480.950315] env[63371]: INFO nova.network.neutron [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Port 8e143eba-fc86-4474-91f7-a5785bb2dbe3 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1480.950315] env[63371]: DEBUG nova.network.neutron [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1480.958016] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1480.964178] env[63371]: DEBUG nova.compute.manager [req-4261813e-3602-48d0-9d18-8fb6e493d371 req-b4a806ff-70de-496b-885a-e38adc031586 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received 
event network-vif-deleted-8e143eba-fc86-4474-91f7-a5785bb2dbe3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1480.964537] env[63371]: INFO nova.compute.manager [req-4261813e-3602-48d0-9d18-8fb6e493d371 req-b4a806ff-70de-496b-885a-e38adc031586 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Neutron deleted interface 8e143eba-fc86-4474-91f7-a5785bb2dbe3; detaching it from the instance and deleting it from the info cache [ 1480.965089] env[63371]: DEBUG nova.network.neutron [req-4261813e-3602-48d0-9d18-8fb6e493d371 req-b4a806ff-70de-496b-885a-e38adc031586 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [{"id": "d92b8632-8794-486c-a8eb-5c8844009035", "address": "fa:16:3e:00:79:85", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.157", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd92b8632-87", "ovs_interfaceid": "d92b8632-8794-486c-a8eb-5c8844009035", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1481.035401] env[63371]: INFO nova.compute.manager [-] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Took 1.28 seconds to deallocate network for instance. [ 1481.055950] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773977, 'name': Rename_Task, 'duration_secs': 0.14507} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.058080] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1481.058080] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4058b5a1-9011-4aae-8b64-b414ffe7c2cf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.064046] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1481.064046] env[63371]: value = "task-1773978" [ 1481.064046] env[63371]: _type = "Task" [ 1481.064046] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.072403] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773978, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.194252] env[63371]: DEBUG nova.compute.manager [req-97d079f7-e491-4cfd-9391-e1419893e5da req-6fff9424-7b3d-4075-9af4-5f2ecf0a2b5a service nova] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Received event network-vif-deleted-9661bc17-8fdd-42bf-ae5d-bfa211e88e4a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1481.194486] env[63371]: DEBUG nova.compute.manager [req-97d079f7-e491-4cfd-9391-e1419893e5da req-6fff9424-7b3d-4075-9af4-5f2ecf0a2b5a service nova] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Received event network-vif-deleted-912c6f7c-cc28-4f29-a362-7a8079dcc422 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1481.282888] env[63371]: INFO nova.compute.manager [-] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Took 1.37 seconds to deallocate network for instance. [ 1481.309724] env[63371]: DEBUG oslo_concurrency.lockutils [None req-de06255d-23e4-4489-921b-9d088b2b8000 tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "cfbd0c7c-243e-497a-acb1-ab9323c23574" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.306s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1481.338264] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521caad6-43c7-52b0-37cd-b72a6c01cefe, 'name': SearchDatastore_Task, 'duration_secs': 0.436946} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.338604] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.338870] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e6cd62ce-f6d2-4e5b-acbc-7527a94e0932/e6cd62ce-f6d2-4e5b-acbc-7527a94e0932.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1481.339146] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dff1a576-9a3d-414c-8e6c-d1dee144bd2f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.348898] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1481.348898] env[63371]: value = "task-1773979" [ 1481.348898] env[63371]: _type = "Task" [ 1481.348898] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1481.350193] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.358366] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773979, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.436991] env[63371]: DEBUG nova.network.neutron [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Successfully updated port: 3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1481.452453] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1481.469243] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2341766f-f18b-489e-9357-602140ab9803 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.479804] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ad1dbb-61cd-4481-94ed-d51813a7daed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.520063] env[63371]: DEBUG nova.compute.manager [req-4261813e-3602-48d0-9d18-8fb6e493d371 req-b4a806ff-70de-496b-885a-e38adc031586 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Detach interface failed, port_id=8e143eba-fc86-4474-91f7-a5785bb2dbe3, reason: Instance 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1481.520063] env[63371]: DEBUG nova.compute.manager [req-4261813e-3602-48d0-9d18-8fb6e493d371 req-b4a806ff-70de-496b-885a-e38adc031586 service nova] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Received event network-vif-deleted-17aee217-e9ac-4d12-8821-73130231a498 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1481.541313] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.578022] env[63371]: DEBUG oslo_vmware.api [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773978, 'name': PowerOnVM_Task, 'duration_secs': 0.484974} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.578310] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1481.578510] env[63371]: INFO nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Took 8.45 seconds to spawn the instance on the hypervisor. [ 1481.578700] env[63371]: DEBUG nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1481.579521] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192fd488-9c96-4a32-948d-722ed9de6d7e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.620224] env[63371]: DEBUG nova.compute.manager [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1481.621083] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef0cc905-899b-4492-bd66-08cc0779f006 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.792795] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1481.867300] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773979, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.940489] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "refresh_cache-594ff846-8e3e-4882-8ddc-41f824a77a5c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1481.940654] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "refresh_cache-594ff846-8e3e-4882-8ddc-41f824a77a5c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1481.940802] env[63371]: DEBUG nova.network.neutron [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1481.958161] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a5974dbd-6f5f-40fa-b418-96ddbb764e2d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-7e463dd7-84a6-4e6d-ae8f-0860e3a20f05-8e143eba-fc86-4474-91f7-a5785bb2dbe3" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.831s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.107152] env[63371]: INFO nova.compute.manager [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Took 35.45 seconds to build instance. 
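The "refresh_cache-594ff846-..." lines above show the recurring lockutils pattern around the instance network info cache: acquire a named lock (lockutils.py:310/313), rebuild the cache, then release the lock (lockutils.py:331). A short sketch of that pattern, assuming oslo.concurrency is installed, is below; refresh_network_cache and build_cache are hypothetical names, and only the "refresh_cache-<uuid>" lock-name convention is taken from the log.

from oslo_concurrency import lockutils


def refresh_network_cache(instance_uuid, build_cache):
    # lockutils.lock() emits the Acquiring/Acquired/Releasing DEBUG lines
    # seen above; the lock name mirrors the convention in the log.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        return build_cache(instance_uuid)


# Hypothetical usage with the instance UUID taken from the log:
# refresh_network_cache("594ff846-8e3e-4882-8ddc-41f824a77a5c",
#                       lambda uuid: {"network_info": []})
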
[ 1482.131101] env[63371]: INFO nova.compute.manager [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] instance snapshotting [ 1482.133516] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3bbc383-433e-4715-80ae-10217b0d6aef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.161138] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d36bf799-7b00-43e2-9d78-9e599e9ef8ff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.222376] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.282626] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767af8e9-1f21-4488-954d-b0c3f28e8965 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.290310] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de293364-e202-41b8-b28d-9f4e8f578511 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.325855] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee2fd4e-2cbc-4327-86b8-6f402024b076 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.333951] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fe71efd-7345-4d1b-8790-4027dc8b8a76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.347890] env[63371]: DEBUG nova.compute.provider_tree [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1482.359551] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773979, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.489921] env[63371]: DEBUG nova.network.neutron [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1482.610562] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7974f81e-30df-439a-a9e2-11a4e8a698aa tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.948s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1482.673330] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1482.673330] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-21b7bed1-7e92-416c-8a82-7c1966bfcbf8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.683411] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1482.683411] env[63371]: value = "task-1773980" [ 1482.683411] env[63371]: _type = "Task" [ 1482.683411] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.700190] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773980, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1482.709556] env[63371]: DEBUG nova.network.neutron [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Updating instance_info_cache with network_info: [{"id": "3d978143-a770-4100-a97a-b0d9503712e0", "address": "fa:16:3e:08:d7:82", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d978143-a7", "ovs_interfaceid": "3d978143-a770-4100-a97a-b0d9503712e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1482.724083] env[63371]: INFO nova.compute.manager [-] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Took 1.81 seconds to deallocate network for instance. [ 1482.851653] env[63371]: DEBUG nova.scheduler.client.report [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1482.866881] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773979, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.369355} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1482.867190] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e6cd62ce-f6d2-4e5b-acbc-7527a94e0932/e6cd62ce-f6d2-4e5b-acbc-7527a94e0932.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1482.867405] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1482.867658] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-448592eb-cc20-4f65-9737-8e34b9faad39 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1482.875368] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1482.875368] env[63371]: value = "task-1773981" [ 1482.875368] env[63371]: _type = "Task" [ 1482.875368] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1482.885368] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773981, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.115465] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1483.195338] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773980, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.214957] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "refresh_cache-594ff846-8e3e-4882-8ddc-41f824a77a5c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1483.215313] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Instance network_info: |[{"id": "3d978143-a770-4100-a97a-b0d9503712e0", "address": "fa:16:3e:08:d7:82", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d978143-a7", "ovs_interfaceid": "3d978143-a770-4100-a97a-b0d9503712e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1483.215739] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:d7:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3d978143-a770-4100-a97a-b0d9503712e0', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1483.223446] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating folder: Project (2a5b81b233f640b186d9798ff57a4945). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1483.224044] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-97ff8555-a126-4ad0-803b-34a96e8c958c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.230917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.235140] env[63371]: DEBUG nova.compute.manager [req-4a044077-06b7-4dcc-b3eb-a348884b9eec req-f794de87-a979-4298-a9ea-a108c1424f82 service nova] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Received event network-vif-deleted-d92b8632-8794-486c-a8eb-5c8844009035 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1483.236942] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Created folder: Project (2a5b81b233f640b186d9798ff57a4945) in parent group-v368199. [ 1483.237152] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating folder: Instances. Parent ref: group-v368324. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1483.237417] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-69af363e-97c6-4cfa-884b-e6af55fcc7b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.246442] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Created folder: Instances in parent group-v368324. [ 1483.246693] env[63371]: DEBUG oslo.service.loopingcall [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1483.246895] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1483.247110] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-49bb16f7-b83c-4183-b33a-3524f74ba6a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.266281] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1483.266281] env[63371]: value = "task-1773984" [ 1483.266281] env[63371]: _type = "Task" [ 1483.266281] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.273949] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773984, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.362519] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.609s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.363257] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1483.368636] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.401s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.369068] env[63371]: INFO nova.compute.claims [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1483.386191] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773981, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068384} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.386516] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1483.387595] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc974ed7-b227-4dbd-a650-742c327e1ddc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.419937] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] e6cd62ce-f6d2-4e5b-acbc-7527a94e0932/e6cd62ce-f6d2-4e5b-acbc-7527a94e0932.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1483.421620] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b192858-693c-4d4b-974f-45b84140b7fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.445472] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1483.445472] env[63371]: value = "task-1773985" [ 1483.445472] env[63371]: _type = "Task" [ 1483.445472] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.450981] env[63371]: DEBUG nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Received event network-vif-plugged-3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1483.451205] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Acquiring lock "594ff846-8e3e-4882-8ddc-41f824a77a5c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.451416] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1483.451612] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1483.451776] env[63371]: DEBUG nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] No waiting events found dispatching network-vif-plugged-3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1483.451952] env[63371]: WARNING nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Received unexpected event network-vif-plugged-3d978143-a770-4100-a97a-b0d9503712e0 for instance with vm_state building and task_state spawning. [ 1483.452110] env[63371]: DEBUG nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Received event network-changed-3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1483.452264] env[63371]: DEBUG nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Refreshing instance network info cache due to event network-changed-3d978143-a770-4100-a97a-b0d9503712e0. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1483.452446] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Acquiring lock "refresh_cache-594ff846-8e3e-4882-8ddc-41f824a77a5c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1483.452580] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Acquired lock "refresh_cache-594ff846-8e3e-4882-8ddc-41f824a77a5c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1483.452731] env[63371]: DEBUG nova.network.neutron [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Refreshing network info cache for port 3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1483.460769] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773985, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.639547] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1483.696817] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773980, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.776757] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773984, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.874932] env[63371]: DEBUG nova.compute.utils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1483.885085] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1483.885085] env[63371]: DEBUG nova.network.neutron [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1483.937787] env[63371]: DEBUG nova.policy [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e4bf6cfe9124f3a9ea2df44c43611f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3df339d9a704d9b9bebecac3871584c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1483.959154] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773985, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.198972] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773980, 'name': CreateSnapshot_Task, 'duration_secs': 1.101817} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.199790] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1484.200717] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fae0ca22-ef9d-4d63-b735-057841483fd0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.277332] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773984, 'name': CreateVM_Task, 'duration_secs': 0.663234} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.277512] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1484.278213] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.278451] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.278680] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1484.278941] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dbea797-798b-4a2d-91af-ec31be17f030 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.285454] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1484.285454] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5227d947-8d3d-690c-ec67-cf93fe5c1051" [ 1484.285454] env[63371]: _type = "Task" [ 1484.285454] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.294415] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5227d947-8d3d-690c-ec67-cf93fe5c1051, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.331736] env[63371]: DEBUG nova.network.neutron [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Successfully created port: 993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1484.337470] env[63371]: DEBUG nova.network.neutron [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Updated VIF entry in instance network info cache for port 3d978143-a770-4100-a97a-b0d9503712e0. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1484.337823] env[63371]: DEBUG nova.network.neutron [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Updating instance_info_cache with network_info: [{"id": "3d978143-a770-4100-a97a-b0d9503712e0", "address": "fa:16:3e:08:d7:82", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3d978143-a7", "ovs_interfaceid": "3d978143-a770-4100-a97a-b0d9503712e0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1484.383200] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1484.468131] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773985, 'name': ReconfigVM_Task, 'duration_secs': 0.85} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.468131] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Reconfigured VM instance instance-0000002d to attach disk [datastore1] e6cd62ce-f6d2-4e5b-acbc-7527a94e0932/e6cd62ce-f6d2-4e5b-acbc-7527a94e0932.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1484.468369] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f3ce6316-1882-4481-a55d-c190842c0693 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.475681] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1484.475681] env[63371]: value = "task-1773986" [ 1484.475681] env[63371]: _type = "Task" [ 1484.475681] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.494215] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773986, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.719624] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1484.722493] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c5da356a-8e72-483e-b906-ef58c3847724 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.730687] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1484.730687] env[63371]: value = "task-1773987" [ 1484.730687] env[63371]: _type = "Task" [ 1484.730687] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.739052] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773987, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.797687] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5227d947-8d3d-690c-ec67-cf93fe5c1051, 'name': SearchDatastore_Task, 'duration_secs': 0.01065} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.800545] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.800867] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1484.802337] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.802337] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.802337] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1484.802337] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb28c6e9-b34f-4b7d-8d23-d27444797189 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.811350] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1484.811679] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1484.814744] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b33a150-faad-4409-93f5-e6e0ca96de5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.821150] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1484.821150] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52dd5136-18a6-8751-cc6d-507fbca178e9" [ 1484.821150] env[63371]: _type = "Task" [ 1484.821150] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.837401] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52dd5136-18a6-8751-cc6d-507fbca178e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.840389] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Releasing lock "refresh_cache-594ff846-8e3e-4882-8ddc-41f824a77a5c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1484.840664] env[63371]: DEBUG nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1484.841179] env[63371]: DEBUG nova.compute.manager [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing instance network info cache due to event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1484.841179] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Acquiring lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1484.841179] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Acquired lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1484.841490] env[63371]: DEBUG nova.network.neutron [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1484.945144] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b994ff-a105-4aab-bb79-43671c66a55c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.952422] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9b9d5a-50c0-4c0e-b9ae-a98a8e673bfe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.989078] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b43d5ca-8215-4696-9995-783b9a4b1376 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.997493] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773986, 'name': Rename_Task, 'duration_secs': 0.267076} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.001663] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1485.001663] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-14458ede-0479-41f0-abaf-0378a23ab8b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.003075] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456939b1-bca4-46fb-9b71-d0ea6f8e4f1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.018865] env[63371]: DEBUG nova.compute.provider_tree [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1485.021452] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1485.021452] env[63371]: value = "task-1773988" [ 1485.021452] env[63371]: _type = "Task" [ 1485.021452] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.029860] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773988, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.241876] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773987, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.334147] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52dd5136-18a6-8751-cc6d-507fbca178e9, 'name': SearchDatastore_Task, 'duration_secs': 0.030177} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.335347] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-107d4255-8403-4faf-b6eb-d3fe32943c9c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.341141] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1485.341141] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5236545e-3ed6-dbbc-9c08-4c1e8f76ab0b" [ 1485.341141] env[63371]: _type = "Task" [ 1485.341141] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.351395] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5236545e-3ed6-dbbc-9c08-4c1e8f76ab0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.398633] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1485.425891] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1485.426193] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1485.426381] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1485.426611] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 
tempest-AttachVolumeTestJSON-810026873-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1485.426797] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1485.426976] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1485.427414] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1485.427615] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1485.427905] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1485.428456] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1485.428456] env[63371]: DEBUG nova.virt.hardware [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1485.429713] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe41789a-c83d-4f2a-9f94-9b9ef108e408 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.443598] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a3f9cf-ee21-43d6-a506-e14c39b97c44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.523746] env[63371]: DEBUG nova.scheduler.client.report [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1485.536551] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773988, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.614834] env[63371]: DEBUG nova.compute.manager [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1485.615112] env[63371]: DEBUG nova.compute.manager [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing instance network info cache due to event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1485.615353] env[63371]: DEBUG oslo_concurrency.lockutils [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] Acquiring lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1485.678323] env[63371]: DEBUG nova.network.neutron [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updated VIF entry in instance network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1485.678714] env[63371]: DEBUG nova.network.neutron [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.745012] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773987, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1485.853470] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5236545e-3ed6-dbbc-9c08-4c1e8f76ab0b, 'name': SearchDatastore_Task, 'duration_secs': 0.040604} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1485.854052] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1485.854586] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c/594ff846-8e3e-4882-8ddc-41f824a77a5c.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1485.855317] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e126883f-73f9-4690-8b41-e07f8874c7f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.862633] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1485.862633] env[63371]: value = "task-1773989" [ 1485.862633] env[63371]: _type = "Task" [ 1485.862633] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1485.871799] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773989, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.012445] env[63371]: DEBUG nova.compute.manager [req-edd7df5b-6dc3-4750-bd1f-7ca6872d972f req-70d18158-c908-461d-bb9e-98f042562505 service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Received event network-vif-plugged-993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1486.012445] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd7df5b-6dc3-4750-bd1f-7ca6872d972f req-70d18158-c908-461d-bb9e-98f042562505 service nova] Acquiring lock "b523486c-adae-4322-80be-1f3bf33ca192-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1486.012445] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd7df5b-6dc3-4750-bd1f-7ca6872d972f req-70d18158-c908-461d-bb9e-98f042562505 service nova] Lock "b523486c-adae-4322-80be-1f3bf33ca192-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.012445] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd7df5b-6dc3-4750-bd1f-7ca6872d972f req-70d18158-c908-461d-bb9e-98f042562505 service nova] Lock "b523486c-adae-4322-80be-1f3bf33ca192-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.012445] env[63371]: DEBUG nova.compute.manager [req-edd7df5b-6dc3-4750-bd1f-7ca6872d972f req-70d18158-c908-461d-bb9e-98f042562505 service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] No waiting events found dispatching network-vif-plugged-993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1486.012445] env[63371]: WARNING nova.compute.manager [req-edd7df5b-6dc3-4750-bd1f-7ca6872d972f req-70d18158-c908-461d-bb9e-98f042562505 service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Received unexpected event network-vif-plugged-993ff886-27f6-48cd-be00-f0e8d292b060 for instance with vm_state building and task_state spawning. [ 1486.035019] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.666s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1486.035019] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1486.036402] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.192s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1486.038156] env[63371]: INFO nova.compute.claims [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1486.047313] env[63371]: DEBUG oslo_vmware.api [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1773988, 'name': PowerOnVM_Task, 'duration_secs': 0.697824} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.047892] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1486.048235] env[63371]: INFO nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Took 9.98 seconds to spawn the instance on the hypervisor. 
[ 1486.048553] env[63371]: DEBUG nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1486.050345] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06a9b03-1258-4540-9aca-c76e591bc306 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.136819] env[63371]: DEBUG nova.network.neutron [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Successfully updated port: 993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1486.185562] env[63371]: DEBUG oslo_concurrency.lockutils [req-1574b44a-4708-4b08-ac09-6ca0982fcf46 req-b47f6714-e45e-4792-b860-7553363ac8c9 service nova] Releasing lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1486.185562] env[63371]: DEBUG oslo_concurrency.lockutils [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] Acquired lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.185562] env[63371]: DEBUG nova.network.neutron [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1486.250493] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773987, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.375952] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773989, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482084} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.376359] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c/594ff846-8e3e-4882-8ddc-41f824a77a5c.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1486.376606] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1486.376919] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06d4df2a-56bd-4520-9a62-026636cdc780 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.385451] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1486.385451] env[63371]: value = "task-1773990" [ 1486.385451] env[63371]: _type = "Task" [ 1486.385451] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.395976] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773990, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.545976] env[63371]: DEBUG nova.compute.utils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1486.547806] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1486.548021] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1486.571355] env[63371]: INFO nova.compute.manager [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Took 34.75 seconds to build instance. 
[ 1486.626260] env[63371]: DEBUG nova.policy [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e88115fd7e2541e08000e93ef9ab0524', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6919de51a2ef456db7a25d4cec1e26ad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1486.639126] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1486.639316] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1486.639496] env[63371]: DEBUG nova.network.neutron [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1486.744621] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773987, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1486.895426] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773990, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.177024} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1486.895673] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1486.896493] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f425d77f-a287-4ca2-8324-513073a1d4d5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.919591] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c/594ff846-8e3e-4882-8ddc-41f824a77a5c.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1486.919869] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abe44092-5d72-4f02-8cc3-69830053aa3a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.943341] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1486.943341] env[63371]: value = "task-1773991" [ 1486.943341] env[63371]: _type = "Task" [ 1486.943341] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1486.952296] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773991, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.008666] env[63371]: DEBUG nova.network.neutron [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updated VIF entry in instance network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1487.008808] env[63371]: DEBUG nova.network.neutron [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.023744] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Successfully created port: 2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1487.051719] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1487.073511] env[63371]: DEBUG oslo_concurrency.lockutils [None req-29ecbf30-fc25-439b-89cc-b0f5960b6c96 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.971s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.184966] env[63371]: DEBUG nova.network.neutron [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1487.249526] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1773987, 'name': CloneVM_Task, 'duration_secs': 2.195936} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.252868] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Created linked-clone VM from snapshot [ 1487.254936] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118caf6f-e2a5-4354-8cc7-cb4e7f6f339b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.264525] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Uploading image f30456ca-8289-4e09-8051-20b3017ac4ca {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1487.289320] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1487.289320] env[63371]: value = "vm-368328" [ 1487.289320] env[63371]: _type = "VirtualMachine" [ 1487.289320] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1487.289605] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c3a95de3-19bb-4271-82ff-a025a9eef000 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.296073] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lease: (returnval){ [ 1487.296073] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca6a15-1f81-bdf6-2d32-788a2033cfd2" [ 1487.296073] env[63371]: _type = "HttpNfcLease" [ 1487.296073] env[63371]: } obtained for exporting VM: (result){ [ 1487.296073] env[63371]: value = "vm-368328" [ 1487.296073] env[63371]: _type = "VirtualMachine" [ 1487.296073] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1487.296380] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the lease: (returnval){ [ 1487.296380] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca6a15-1f81-bdf6-2d32-788a2033cfd2" [ 1487.296380] env[63371]: _type = "HttpNfcLease" [ 1487.296380] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1487.306045] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1487.306045] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca6a15-1f81-bdf6-2d32-788a2033cfd2" [ 1487.306045] env[63371]: _type = "HttpNfcLease" [ 1487.306045] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1487.344411] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Successfully created port: 0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1487.400241] env[63371]: DEBUG nova.network.neutron [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updating instance_info_cache with network_info: [{"id": "993ff886-27f6-48cd-be00-f0e8d292b060", "address": "fa:16:3e:14:89:81", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993ff886-27", "ovs_interfaceid": "993ff886-27f6-48cd-be00-f0e8d292b060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1487.453593] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773991, 'name': ReconfigVM_Task, 'duration_secs': 0.278961} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.456199] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c/594ff846-8e3e-4882-8ddc-41f824a77a5c.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1487.457205] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc80a647-043d-48e4-8eb5-557eed3144b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.463822] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1487.463822] env[63371]: value = "task-1773993" [ 1487.463822] env[63371]: _type = "Task" [ 1487.463822] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.473601] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773993, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.512966] env[63371]: DEBUG oslo_concurrency.lockutils [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] Releasing lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.512966] env[63371]: DEBUG nova.compute.manager [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Received event network-changed-96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1487.512966] env[63371]: DEBUG nova.compute.manager [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Refreshing instance network info cache due to event network-changed-96760ebc-7de4-48e4-94ac-f0a3a2eab943. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1487.513150] env[63371]: DEBUG oslo_concurrency.lockutils [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] Acquiring lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1487.513150] env[63371]: DEBUG oslo_concurrency.lockutils [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] Acquired lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1487.513303] env[63371]: DEBUG nova.network.neutron [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Refreshing network info cache for port 96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1487.556427] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff9bde4-3f37-4363-8605-5ac0567b58e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.568516] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d34b1a2-af0f-4aba-88ef-5ab9cbf8addd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.607327] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1487.610340] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "7e66011a-4fed-471f-82ea-e1016f92ad39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.610610] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.610822] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1487.610994] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1487.611179] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1487.613719] env[63371]: INFO nova.compute.manager [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Terminating instance [ 1487.615868] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d037e767-3a90-460d-a8cb-d180c52deb51 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.619163] env[63371]: DEBUG nova.compute.manager [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1487.619462] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1487.620277] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-992c51e2-0cae-4f17-b246-0bbafdcbc0e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.631198] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a547fb7-ffa2-43de-a15f-077ffe394a82 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.635888] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1487.636109] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d6d0872-d732-43b9-8752-a8c5056e82ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.650983] env[63371]: DEBUG nova.compute.provider_tree [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1487.654315] env[63371]: DEBUG oslo_vmware.api [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1487.654315] env[63371]: value = "task-1773994" [ 1487.654315] env[63371]: _type = "Task" [ 1487.654315] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.666872] env[63371]: DEBUG oslo_vmware.api [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773994, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.805582] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1487.805582] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca6a15-1f81-bdf6-2d32-788a2033cfd2" [ 1487.805582] env[63371]: _type = "HttpNfcLease" [ 1487.805582] env[63371]: } is ready. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1487.805582] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1487.805582] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca6a15-1f81-bdf6-2d32-788a2033cfd2" [ 1487.805582] env[63371]: _type = "HttpNfcLease" [ 1487.805582] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1487.807074] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3ba7f9-da21-4c30-8dce-e1f7180b510e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.812989] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278e13a-e67d-2d6f-34be-e2faca7779f3/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1487.813307] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278e13a-e67d-2d6f-34be-e2faca7779f3/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1487.872016] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Successfully created port: f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1487.907069] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Releasing lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1487.907069] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Instance network_info: |[{"id": "993ff886-27f6-48cd-be00-f0e8d292b060", "address": "fa:16:3e:14:89:81", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993ff886-27", "ovs_interfaceid": "993ff886-27f6-48cd-be00-f0e8d292b060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1487.907069] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:89:81', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d6abf71-e893-4dec-9a05-0fe7d6c0624e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '993ff886-27f6-48cd-be00-f0e8d292b060', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1487.912094] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Creating folder: Project (f3df339d9a704d9b9bebecac3871584c). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1487.912446] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a5097d9-9f47-4612-932a-4690e68440fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.922895] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Created folder: Project (f3df339d9a704d9b9bebecac3871584c) in parent group-v368199. [ 1487.923111] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Creating folder: Instances. Parent ref: group-v368329. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1487.923372] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-713276ab-e17b-4f58-b7e9-dd4984ca6ab7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.931623] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Created folder: Instances in parent group-v368329. [ 1487.931871] env[63371]: DEBUG oslo.service.loopingcall [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1487.932070] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1487.932287] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ce8f153e-307a-453b-8b96-d8c436a7fa3c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.952926] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7ae84007-f576-49ba-a46c-6fbab18482cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.956830] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1487.956830] env[63371]: value = "task-1773997" [ 1487.956830] env[63371]: _type = "Task" [ 1487.956830] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1487.967930] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773997, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1487.977022] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773993, 'name': Rename_Task, 'duration_secs': 0.145549} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1487.981021] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1487.981021] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-540126dd-2664-479e-8a2b-985976982696 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1487.987358] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1487.987358] env[63371]: value = "task-1773998" [ 1487.987358] env[63371]: _type = "Task" [ 1487.987358] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.004453] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773998, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.066223] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1488.094470] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1488.094706] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1488.094851] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1488.096017] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1488.096017] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1488.096017] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1488.096017] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1488.096017] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1488.096017] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 
tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1488.096317] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1488.096317] env[63371]: DEBUG nova.virt.hardware [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1488.097192] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb545a1c-ef48-42d0-bc97-59ae92cf1ad5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.106334] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd6cfaae-b9da-47df-94f7-53d1d0aa09fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.153797] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.156208] env[63371]: DEBUG nova.scheduler.client.report [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1488.171899] env[63371]: DEBUG oslo_vmware.api [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1773994, 'name': PowerOffVM_Task, 'duration_secs': 0.45618} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.172178] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1488.172341] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1488.172585] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6df90de-c58c-4a45-8d24-c6f2795d14ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.254026] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1488.254647] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1488.254765] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Deleting the datastore file [datastore1] 7e66011a-4fed-471f-82ea-e1016f92ad39 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1488.255093] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48a23219-9cdf-4238-9858-b3296e4de8de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.264292] env[63371]: DEBUG oslo_vmware.api [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1488.264292] env[63371]: value = "task-1774000" [ 1488.264292] env[63371]: _type = "Task" [ 1488.264292] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.272557] env[63371]: DEBUG oslo_vmware.api [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1774000, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.277607] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "1c93487b-6d8f-424d-8b95-10bfb894c609" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1488.277870] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "1c93487b-6d8f-424d-8b95-10bfb894c609" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.364843] env[63371]: DEBUG nova.network.neutron [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updated VIF entry in instance network info cache for port 96760ebc-7de4-48e4-94ac-f0a3a2eab943. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1488.365233] env[63371]: DEBUG nova.network.neutron [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [{"id": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "address": "fa:16:3e:cb:30:e4", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96760ebc-7d", "ovs_interfaceid": "96760ebc-7de4-48e4-94ac-f0a3a2eab943", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1488.467120] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1773997, 'name': CreateVM_Task, 'duration_secs': 0.369673} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.467413] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1488.467982] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.468376] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.468508] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1488.468725] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cef0fb30-7634-4141-a2bc-a4e487c5d802 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.473470] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1488.473470] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52321aa7-b8a3-9011-05bb-16e32060d088" [ 1488.473470] env[63371]: _type = "Task" [ 1488.473470] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1488.481893] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52321aa7-b8a3-9011-05bb-16e32060d088, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.500307] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773998, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1488.575585] env[63371]: DEBUG nova.compute.manager [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Received event network-changed-993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1488.575912] env[63371]: DEBUG nova.compute.manager [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Refreshing instance network info cache due to event network-changed-993ff886-27f6-48cd-be00-f0e8d292b060. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1488.576191] env[63371]: DEBUG oslo_concurrency.lockutils [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] Acquiring lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.576398] env[63371]: DEBUG oslo_concurrency.lockutils [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] Acquired lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.576974] env[63371]: DEBUG nova.network.neutron [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Refreshing network info cache for port 993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1488.662656] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1488.663580] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1488.669717] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.611s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1488.670040] env[63371]: DEBUG nova.objects.instance [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lazy-loading 'resources' on Instance uuid e00c2e45-b8bc-440b-8b58-a21f127192c7 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1488.775215] env[63371]: DEBUG oslo_vmware.api [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1774000, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227808} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.778749] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1488.778749] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1488.778749] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1488.778749] env[63371]: INFO nova.compute.manager [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1488.778749] env[63371]: DEBUG oslo.service.loopingcall [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1488.778749] env[63371]: DEBUG nova.compute.manager [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1488.778749] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1488.869322] env[63371]: DEBUG oslo_concurrency.lockutils [req-6099e39c-b89f-4198-94b6-37ff2ca2fba4 req-341abd03-9bb8-4afc-8a9c-306ab94372d1 service nova] Releasing lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.987950] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52321aa7-b8a3-9011-05bb-16e32060d088, 'name': SearchDatastore_Task, 'duration_secs': 0.012417} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1488.988490] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1488.988745] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1488.988973] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1488.989412] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1488.989629] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1488.989937] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0f8442be-b3e1-4ab2-b0d8-f49586591c45 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.004068] env[63371]: DEBUG oslo_vmware.api [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1773998, 'name': PowerOnVM_Task, 'duration_secs': 0.555855} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.006281] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1489.009805] env[63371]: INFO nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Took 8.46 seconds to spawn the instance on the hypervisor. [ 1489.009805] env[63371]: DEBUG nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1489.009805] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1489.009805] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1489.009805] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626e0b11-b1c5-4644-a7f2-95454763f270 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.011500] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c3f41e0-852f-4097-a395-50adeeca53c7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.021920] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1489.021920] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52292885-3021-8130-5634-cec2137e330f" [ 1489.021920] env[63371]: _type = "Task" [ 1489.021920] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.033491] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52292885-3021-8130-5634-cec2137e330f, 'name': SearchDatastore_Task, 'duration_secs': 0.010292} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.034505] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e950d806-9f4f-47ff-b7ae-da9f8c6b45b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.040255] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1489.040255] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526504b5-fca5-ab91-b4d1-d2ada27d1011" [ 1489.040255] env[63371]: _type = "Task" [ 1489.040255] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.050259] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526504b5-fca5-ab91-b4d1-d2ada27d1011, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.171396] env[63371]: DEBUG nova.compute.utils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1489.173928] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1489.173928] env[63371]: DEBUG nova.network.neutron [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1489.312042] env[63371]: DEBUG nova.policy [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd6aa709a53564231ac25fb3e878239ab', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c76a64c712ca4aa98c19600ef0469855', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1489.453429] env[63371]: DEBUG nova.compute.manager [req-b5d2038d-49f2-4e93-8440-35ed875ac530 req-b930ac52-766c-4fff-a2b0-ce1956ffffcd service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Received event network-vif-deleted-96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1489.453683] env[63371]: INFO nova.compute.manager [req-b5d2038d-49f2-4e93-8440-35ed875ac530 req-b930ac52-766c-4fff-a2b0-ce1956ffffcd service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Neutron deleted interface 96760ebc-7de4-48e4-94ac-f0a3a2eab943; detaching it from the instance and deleting it from the info cache [ 1489.453936] env[63371]: DEBUG nova.network.neutron [req-b5d2038d-49f2-4e93-8440-35ed875ac530 req-b930ac52-766c-4fff-a2b0-ce1956ffffcd service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.514992] env[63371]: DEBUG nova.network.neutron [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updated VIF entry in instance network info cache for port 993ff886-27f6-48cd-be00-f0e8d292b060. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1489.515524] env[63371]: DEBUG nova.network.neutron [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updating instance_info_cache with network_info: [{"id": "993ff886-27f6-48cd-be00-f0e8d292b060", "address": "fa:16:3e:14:89:81", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993ff886-27", "ovs_interfaceid": "993ff886-27f6-48cd-be00-f0e8d292b060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.536577] env[63371]: INFO nova.compute.manager [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Took 32.87 seconds to build instance. [ 1489.556772] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526504b5-fca5-ab91-b4d1-d2ada27d1011, 'name': SearchDatastore_Task, 'duration_secs': 0.010553} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1489.562046] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1489.562409] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b523486c-adae-4322-80be-1f3bf33ca192/b523486c-adae-4322-80be-1f3bf33ca192.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1489.563695] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69ae438d-efe4-4841-9266-30e2b90a35de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.572708] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1489.572708] env[63371]: value = "task-1774001" [ 1489.572708] env[63371]: _type = "Task" [ 1489.572708] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1489.588841] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774001, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1489.649581] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1489.676694] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1489.790433] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d92888c-7c60-4e0e-a41c-a717e0d6edd8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.803108] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7de8f29-f9bb-44f7-b143-603b70671061 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.812649] env[63371]: DEBUG nova.network.neutron [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Successfully created port: e13a7d6d-6643-4b64-a4b1-2a59397c5307 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1489.844795] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d27898-3f42-4c6f-80ae-c5c7215e71b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.855030] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c6497c-aa2f-426a-99d7-c8a35f257fa6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.873181] env[63371]: DEBUG nova.compute.provider_tree [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1489.959798] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d90381f-49a6-46db-ac92-006796b2103c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1489.970319] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f0cc14-3bb0-4bcd-995d-c2db5d0b0e9e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.010211] env[63371]: DEBUG nova.compute.manager [req-b5d2038d-49f2-4e93-8440-35ed875ac530 req-b930ac52-766c-4fff-a2b0-ce1956ffffcd service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Detach interface failed, port_id=96760ebc-7de4-48e4-94ac-f0a3a2eab943, reason: Instance 7e66011a-4fed-471f-82ea-e1016f92ad39 could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1490.019087] env[63371]: DEBUG oslo_concurrency.lockutils [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] Releasing lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1490.019375] env[63371]: DEBUG nova.compute.manager [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Received event network-changed-96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1490.020023] env[63371]: DEBUG nova.compute.manager [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Refreshing instance network info cache due to event network-changed-96760ebc-7de4-48e4-94ac-f0a3a2eab943. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1490.020996] env[63371]: DEBUG oslo_concurrency.lockutils [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] Acquiring lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.021204] env[63371]: DEBUG oslo_concurrency.lockutils [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] Acquired lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.021378] env[63371]: DEBUG nova.network.neutron [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Refreshing network info cache for port 96760ebc-7de4-48e4-94ac-f0a3a2eab943 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1490.040436] env[63371]: DEBUG oslo_concurrency.lockutils [None req-04bd0a59-0bc2-440c-8607-04d90f8d3499 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.184s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.089573] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774001, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.125472] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Successfully updated port: 2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1490.154716] env[63371]: INFO nova.compute.manager [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Took 1.38 seconds to deallocate network for instance. [ 1490.376838] env[63371]: DEBUG nova.scheduler.client.report [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1490.539739] env[63371]: DEBUG nova.network.neutron [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1490.543178] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1490.588221] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774001, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575555} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1490.588522] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b523486c-adae-4322-80be-1f3bf33ca192/b523486c-adae-4322-80be-1f3bf33ca192.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1490.588707] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1490.588971] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40c97e6c-2a0c-46c6-99f2-6114352ec671 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.596507] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1490.596507] env[63371]: value = "task-1774002" [ 1490.596507] env[63371]: _type = "Task" [ 1490.596507] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1490.608917] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774002, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1490.661342] env[63371]: DEBUG nova.network.neutron [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1490.663815] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1490.689334] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1490.714895] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:32:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bb3da7ed-b700-420c-a825-23c0d1a3f881',id=26,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2130760861',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1490.715480] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1490.715480] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1490.715619] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1490.715658] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1490.715815] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1490.716040] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1490.716203] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1490.716367] env[63371]: 
DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1490.716524] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1490.716690] env[63371]: DEBUG nova.virt.hardware [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1490.717561] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa7e56a-785d-4c43-ac2b-77eff8c47707 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.725757] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07918d7d-6ebe-40df-9929-e6c53c0069e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1490.882237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.212s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1490.886093] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1490.886093] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing instance network info cache due to event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1490.886093] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquiring lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1490.886537] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquired lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1490.886537] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1490.887743] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.618s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1490.888101] env[63371]: DEBUG nova.objects.instance [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lazy-loading 'resources' on Instance uuid cbcdfe1a-86a4-4a12-99b5-44d291d41769 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1490.916687] env[63371]: INFO nova.scheduler.client.report [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Deleted allocations for instance e00c2e45-b8bc-440b-8b58-a21f127192c7 [ 1491.066691] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.111453] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774002, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073431} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.112390] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1491.112856] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac73b958-3743-474c-90d1-dc1c4b6c3ffe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.137717] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] b523486c-adae-4322-80be-1f3bf33ca192/b523486c-adae-4322-80be-1f3bf33ca192.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1491.137717] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3cd0369a-6a07-4cd0-9053-8977201987a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.159662] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1491.159662] env[63371]: value = "task-1774003" [ 1491.159662] env[63371]: _type = "Task" [ 1491.159662] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.165609] env[63371]: DEBUG oslo_concurrency.lockutils [req-b04ed014-a320-4bcc-8ffe-a61d46c0c2da req-159da386-9c94-477c-8a43-e4277fcefd0b service nova] Releasing lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1491.171732] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774003, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.234466] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.235475] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.235868] env[63371]: DEBUG nova.compute.manager [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1491.237356] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c0c257-dd72-44a2-a921-3bb0afd6c02a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.245610] env[63371]: DEBUG nova.compute.manager [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63371) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1491.246631] env[63371]: DEBUG nova.objects.instance [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lazy-loading 'flavor' on Instance uuid 594ff846-8e3e-4882-8ddc-41f824a77a5c {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1491.264834] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "7e66011a-4fed-471f-82ea-e1016f92ad39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.432199] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fddf9a9-c4e3-4e87-ac84-c618e54f7d9c tempest-ServersTestBootFromVolume-143903165 tempest-ServersTestBootFromVolume-143903165-project-member] Lock "e00c2e45-b8bc-440b-8b58-a21f127192c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.840s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.518406] env[63371]: DEBUG nova.compute.manager [req-0500cfe1-aad1-4b54-b504-da01f2151bdc req-a0e863ed-1d13-42e4-8816-45ffe147b4c1 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Received event network-vif-plugged-e13a7d6d-6643-4b64-a4b1-2a59397c5307 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1491.519132] env[63371]: DEBUG oslo_concurrency.lockutils [req-0500cfe1-aad1-4b54-b504-da01f2151bdc req-a0e863ed-1d13-42e4-8816-45ffe147b4c1 service nova] Acquiring lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1491.519316] env[63371]: DEBUG oslo_concurrency.lockutils [req-0500cfe1-aad1-4b54-b504-da01f2151bdc req-a0e863ed-1d13-42e4-8816-45ffe147b4c1 service nova] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1491.519495] env[63371]: DEBUG oslo_concurrency.lockutils [req-0500cfe1-aad1-4b54-b504-da01f2151bdc req-a0e863ed-1d13-42e4-8816-45ffe147b4c1 service nova] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1491.519666] env[63371]: DEBUG nova.compute.manager [req-0500cfe1-aad1-4b54-b504-da01f2151bdc req-a0e863ed-1d13-42e4-8816-45ffe147b4c1 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] No waiting events found dispatching network-vif-plugged-e13a7d6d-6643-4b64-a4b1-2a59397c5307 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1491.519826] env[63371]: WARNING nova.compute.manager [req-0500cfe1-aad1-4b54-b504-da01f2151bdc req-a0e863ed-1d13-42e4-8816-45ffe147b4c1 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Received unexpected event network-vif-plugged-e13a7d6d-6643-4b64-a4b1-2a59397c5307 for instance with vm_state building and task_state spawning. [ 1491.674959] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774003, 'name': ReconfigVM_Task, 'duration_secs': 0.359094} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1491.675313] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Reconfigured VM instance instance-0000002f to attach disk [datastore1] b523486c-adae-4322-80be-1f3bf33ca192/b523486c-adae-4322-80be-1f3bf33ca192.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1491.676050] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cf9ae12e-8bbe-41cd-97c8-8d2bd9f6f700 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.685382] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1491.685382] env[63371]: value = "task-1774004" [ 1491.685382] env[63371]: _type = "Task" [ 1491.685382] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.699933] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774004, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.751888] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1491.752204] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac44a05f-1ac7-4df0-a60b-4a9c8777e43f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1491.759970] env[63371]: DEBUG oslo_vmware.api [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1491.759970] env[63371]: value = "task-1774005" [ 1491.759970] env[63371]: _type = "Task" [ 1491.759970] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1491.769166] env[63371]: DEBUG oslo_vmware.api [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774005, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1491.815011] env[63371]: DEBUG nova.network.neutron [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Successfully updated port: e13a7d6d-6643-4b64-a4b1-2a59397c5307 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1491.836207] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updated VIF entry in instance network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1491.836915] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1492.001797] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9447b2-f2b3-4b7e-a574-cfd2f5673a89 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.013513] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac6432f-d344-4b85-a6f2-5a1dac2c88a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.044577] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7605d7ff-d812-446f-9989-27482a768043 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.052772] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8963124f-421b-4324-bacd-16e584b9a670 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.067855] env[63371]: DEBUG nova.compute.provider_tree [None 
req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1492.200506] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774004, 'name': Rename_Task, 'duration_secs': 0.163363} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.200786] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1492.201066] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b5edba7-13fc-4a0b-beb8-7ac86f4798ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.207308] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1492.207308] env[63371]: value = "task-1774006" [ 1492.207308] env[63371]: _type = "Task" [ 1492.207308] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1492.219627] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774006, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1492.269803] env[63371]: DEBUG oslo_vmware.api [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774005, 'name': PowerOffVM_Task, 'duration_secs': 0.236749} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.270056] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1492.270238] env[63371]: DEBUG nova.compute.manager [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1492.271054] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23f2de0-2d51-4dab-a0a4-469e5e2f1f72 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.317936] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.318174] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.318286] env[63371]: DEBUG nova.network.neutron [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1492.343325] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Releasing lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1492.343549] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-vif-plugged-2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1492.343746] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquiring lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1492.343941] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s 
{{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1492.344115] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.344280] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] No waiting events found dispatching network-vif-plugged-2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1492.344443] env[63371]: WARNING nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received unexpected event network-vif-plugged-2691ba66-0c30-4f84-af20-63a2d5a37564 for instance with vm_state building and task_state spawning. [ 1492.344599] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1492.344828] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing instance network info cache due to event network-changed-fcd67cd5-500d-457a-9bbb-655583d97dd2. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1492.345067] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquiring lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1492.345215] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquired lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1492.345385] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Refreshing network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1492.408609] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Successfully updated port: 0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1492.573030] env[63371]: DEBUG nova.scheduler.client.report [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1492.718194] env[63371]: DEBUG oslo_vmware.api [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774006, 'name': PowerOnVM_Task, 'duration_secs': 0.50165} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1492.718530] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1492.718740] env[63371]: INFO nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Took 7.32 seconds to spawn the instance on the hypervisor. 
[ 1492.718930] env[63371]: DEBUG nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1492.719856] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187bc59f-89ab-4cd7-9b48-0fdedcdc47f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1492.789904] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ed5c219-f1ab-4efc-90fa-ce0a2a713870 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.553s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1492.868331] env[63371]: DEBUG nova.network.neutron [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1493.079294] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.191s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.082047] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.980s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1493.082275] env[63371]: DEBUG nova.objects.instance [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lazy-loading 'resources' on Instance uuid b5e259ea-d103-41c6-84b3-748813bb514d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1493.103486] env[63371]: INFO nova.scheduler.client.report [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Deleted allocations for instance cbcdfe1a-86a4-4a12-99b5-44d291d41769 [ 1493.143867] env[63371]: DEBUG nova.network.neutron [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance_info_cache with network_info: [{"id": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "address": "fa:16:3e:e7:1c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a7d6d-66", "ovs_interfaceid": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.215904] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updated VIF entry in instance network info cache for port fcd67cd5-500d-457a-9bbb-655583d97dd2. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1493.216327] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [{"id": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "address": "fa:16:3e:f2:92:a7", "network": {"id": "49f09f27-e705-4c87-9772-a24ee44e674d", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-539450604-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3fa37041acf4211987c97c105c47cf0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca99f7a1-6365-4d3c-af16-1b1c1288091e", "external-id": "cl2-zone-334", "segmentation_id": 334, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfcd67cd5-50", "ovs_interfaceid": "fcd67cd5-500d-457a-9bbb-655583d97dd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1493.237733] env[63371]: INFO nova.compute.manager [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Took 34.91 seconds to build instance. 
[ 1493.615864] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d7fbc2ee-7f52-4f94-b6a2-db6cec57eb31 tempest-VolumesAssistedSnapshotsTest-1230471284 tempest-VolumesAssistedSnapshotsTest-1230471284-project-member] Lock "cbcdfe1a-86a4-4a12-99b5-44d291d41769" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.888s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.653516] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.653833] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Instance network_info: |[{"id": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "address": "fa:16:3e:e7:1c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a7d6d-66", "ovs_interfaceid": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1493.654299] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:1c:c5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cbf3349e-d05e-4d44-a011-c4b6e41af988', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e13a7d6d-6643-4b64-a4b1-2a59397c5307', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1493.666061] env[63371]: DEBUG oslo.service.loopingcall [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1493.669825] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1493.670362] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-118fff70-a944-4564-ba07-3771ed0fb23c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1493.693998] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1493.693998] env[63371]: value = "task-1774007" [ 1493.693998] env[63371]: _type = "Task" [ 1493.693998] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1493.702680] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774007, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1493.719690] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Releasing lock "refresh_cache-e0369f27-68ea-49c4-8524-3dbbb3cde96e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1493.719962] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-changed-2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1493.720250] env[63371]: DEBUG nova.compute.manager [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Refreshing instance network info cache due to event network-changed-2691ba66-0c30-4f84-af20-63a2d5a37564. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1493.720335] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquiring lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.720471] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Acquired lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.720663] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Refreshing network info cache for port 2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1493.742793] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54300a6a-8185-4a8d-99e5-6df24ad7c305 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.229s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1493.796823] env[63371]: DEBUG nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Received event network-changed-e13a7d6d-6643-4b64-a4b1-2a59397c5307 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1493.796954] env[63371]: DEBUG nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Refreshing instance network info cache due to event network-changed-e13a7d6d-6643-4b64-a4b1-2a59397c5307. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1493.797149] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Acquiring lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1493.797289] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Acquired lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1493.797464] env[63371]: DEBUG nova.network.neutron [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Refreshing network info cache for port e13a7d6d-6643-4b64-a4b1-2a59397c5307 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1494.120081] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739f537b-62ee-4041-8a63-b5994f701e87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.129192] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19333a56-fd62-4c09-a159-1852e72a43f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.167417] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d4b82d-9906-44a2-94b8-e107a1b2f12c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.178719] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75ba0c5-dc6b-453d-bff9-d64f9d48f05c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.195862] env[63371]: DEBUG nova.compute.provider_tree [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1494.210917] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774007, 'name': CreateVM_Task, 'duration_secs': 0.514428} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.211907] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1494.212718] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.212898] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.213252] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1494.213769] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9de625ae-cf64-4b40-8973-640988932a21 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.219539] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1494.219539] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]529c75d8-0685-572a-10a7-708919600edd" [ 1494.219539] env[63371]: _type = "Task" [ 1494.219539] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.236555] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529c75d8-0685-572a-10a7-708919600edd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.280641] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1494.413144] env[63371]: DEBUG nova.network.neutron [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.702136] env[63371]: DEBUG nova.scheduler.client.report [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1494.731021] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529c75d8-0685-572a-10a7-708919600edd, 'name': SearchDatastore_Task, 'duration_secs': 0.017729} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1494.731375] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1494.731555] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1494.731830] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1494.732062] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1494.732189] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1494.732469] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bac07b53-2e6b-4ad7-a9b1-3f7e4c1f989e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.741750] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1494.741957] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1494.742733] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba6ae100-49a9-405e-b01d-af00a1c12342 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1494.749106] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1494.749106] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5249c16a-fca7-6361-70f2-09aa30eeee87" [ 1494.749106] env[63371]: _type = "Task" [ 1494.749106] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1494.758194] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5249c16a-fca7-6361-70f2-09aa30eeee87, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1494.775577] env[63371]: DEBUG nova.network.neutron [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updated VIF entry in instance network info cache for port e13a7d6d-6643-4b64-a4b1-2a59397c5307. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1494.775943] env[63371]: DEBUG nova.network.neutron [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance_info_cache with network_info: [{"id": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "address": "fa:16:3e:e7:1c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a7d6d-66", "ovs_interfaceid": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1494.918202] env[63371]: DEBUG oslo_concurrency.lockutils [req-cec54947-b0a6-44b1-bd3a-35aea0a33031 req-4e990a9c-1626-4ee0-9198-a098bb8baa29 service nova] Releasing lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.033764] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Successfully updated port: f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1495.207961] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.126s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.211936] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.075s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.212968] env[63371]: DEBUG nova.objects.instance [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lazy-loading 'resources' on Instance uuid 201a2d1e-9e2c-4c07-92be-200408874ad4 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1495.219105] env[63371]: 
DEBUG nova.compute.manager [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1495.220022] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe96c7e1-81ab-497b-a8a8-a1ed668a39f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.242360] env[63371]: INFO nova.scheduler.client.report [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleted allocations for instance b5e259ea-d103-41c6-84b3-748813bb514d [ 1495.265073] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5249c16a-fca7-6361-70f2-09aa30eeee87, 'name': SearchDatastore_Task, 'duration_secs': 0.017189} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.266019] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2100890c-7a5b-4450-9044-9d1474c18b13 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.272395] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1495.272395] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5297c9bb-cc98-2d47-a774-a75c0988d21b" [ 1495.272395] env[63371]: _type = "Task" [ 1495.272395] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.281932] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Releasing lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.282362] env[63371]: DEBUG nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-vif-plugged-0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1495.282647] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Acquiring lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1495.282948] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1495.283175] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.283990] env[63371]: DEBUG nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] No waiting events found dispatching network-vif-plugged-0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1495.283990] env[63371]: WARNING nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received unexpected event network-vif-plugged-0fad7cfe-3cf5-4996-9d68-50d1919577c7 for instance with vm_state building and task_state spawning. [ 1495.283990] env[63371]: DEBUG nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-changed-0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1495.283990] env[63371]: DEBUG nova.compute.manager [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Refreshing instance network info cache due to event network-changed-0fad7cfe-3cf5-4996-9d68-50d1919577c7. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1495.284344] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Acquiring lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.284413] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Acquired lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.284614] env[63371]: DEBUG nova.network.neutron [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Refreshing network info cache for port 0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1495.285814] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5297c9bb-cc98-2d47-a774-a75c0988d21b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.539510] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.735367] env[63371]: INFO nova.compute.manager [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] instance snapshotting [ 1495.735734] env[63371]: WARNING nova.compute.manager [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1495.741521] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a899a62f-82cc-465b-a0e2-b6f9b01810a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.758243] env[63371]: DEBUG oslo_concurrency.lockutils [None req-14a51e01-57d6-45a2-919f-d9acdfd0a71a tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "b5e259ea-d103-41c6-84b3-748813bb514d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.079s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1495.785181] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e42fa9b-e80c-430a-8c04-d364b42f3bea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.806884] env[63371]: DEBUG oslo_vmware.api [None 
req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5297c9bb-cc98-2d47-a774-a75c0988d21b, 'name': SearchDatastore_Task, 'duration_secs': 0.013455} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1495.813963] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1495.815224] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1495.815454] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ab7e5e7-ef1b-4ea2-b33d-8cc8fe7c1106 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.824776] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1495.824776] env[63371]: value = "task-1774008" [ 1495.824776] env[63371]: _type = "Task" [ 1495.824776] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.835381] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774008, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1495.839136] env[63371]: DEBUG nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Received event network-changed-993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1495.839136] env[63371]: DEBUG nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Refreshing instance network info cache due to event network-changed-993ff886-27f6-48cd-be00-f0e8d292b060. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1495.839136] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Acquiring lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1495.839136] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Acquired lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1495.839136] env[63371]: DEBUG nova.network.neutron [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Refreshing network info cache for port 993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1495.910066] env[63371]: DEBUG nova.network.neutron [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1496.051520] env[63371]: DEBUG nova.network.neutron [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1496.294298] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebf20b37-85e5-4445-8134-88819a56d172 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.303028] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf439fa7-67a2-4bd7-8d96-a3e952c1a5d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.338399] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1496.339569] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bbaadeda-ccf7-4487-8b07-9beaf1ccd439 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.346022] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-257ae675-3e20-4f09-bfa4-ed203b956821 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.357827] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774008, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.362298] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1496.362298] env[63371]: value = "task-1774009" [ 1496.362298] env[63371]: _type = "Task" [ 1496.362298] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.364097] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba50473-87a1-4fe8-bc77-e0e0dc9719ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.391673] env[63371]: DEBUG nova.compute.provider_tree [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1496.394235] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774009, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.554211] env[63371]: DEBUG oslo_concurrency.lockutils [req-e493a5f5-7946-4696-a70e-37bbfe25dfc3 req-4fe95368-9cc5-417b-a54c-b02abdc8dab9 service nova] Releasing lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1496.554704] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquired lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1496.554883] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1496.724568] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278e13a-e67d-2d6f-34be-e2faca7779f3/disk-0.vmdk. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1496.725509] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f12c156-fc0c-457c-9f91-57a1a6fa45e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.732454] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278e13a-e67d-2d6f-34be-e2faca7779f3/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1496.732454] env[63371]: ERROR oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278e13a-e67d-2d6f-34be-e2faca7779f3/disk-0.vmdk due to incomplete transfer. [ 1496.732454] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d100461f-e1b4-4606-b81c-9a86bf8e16d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.740502] env[63371]: DEBUG oslo_vmware.rw_handles [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5278e13a-e67d-2d6f-34be-e2faca7779f3/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1496.740502] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Uploaded image f30456ca-8289-4e09-8051-20b3017ac4ca to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1496.741573] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1496.741830] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a05d5cdf-b6bc-4396-991c-53f13a9e4f54 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.747782] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1496.747782] env[63371]: value = "task-1774010" [ 1496.747782] env[63371]: _type = "Task" [ 1496.747782] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.755518] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774010, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.853739] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774008, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.711668} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1496.853739] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1496.853982] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1496.854239] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5ff61186-22d9-408b-83f1-f4e84fd9e9b8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.861997] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1496.861997] env[63371]: value = "task-1774011" [ 1496.861997] env[63371]: _type = "Task" [ 1496.861997] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.878447] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774011, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.880097] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774009, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.895456] env[63371]: DEBUG nova.scheduler.client.report [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1496.941487] env[63371]: DEBUG nova.network.neutron [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updated VIF entry in instance network info cache for port 993ff886-27f6-48cd-be00-f0e8d292b060. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1496.943456] env[63371]: DEBUG nova.network.neutron [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updating instance_info_cache with network_info: [{"id": "993ff886-27f6-48cd-be00-f0e8d292b060", "address": "fa:16:3e:14:89:81", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993ff886-27", "ovs_interfaceid": "993ff886-27f6-48cd-be00-f0e8d292b060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.097155] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1497.264827] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774010, 'name': Destroy_Task, 'duration_secs': 0.435952} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.265415] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Destroyed the VM [ 1497.265814] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1497.266890] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5bd59f4b-e48f-4fee-8c57-a8e82d2f59ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.273862] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1497.273862] env[63371]: value = "task-1774012" [ 1497.273862] env[63371]: _type = "Task" [ 1497.273862] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.284166] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774012, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.377105] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774011, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119717} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.381814] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1497.382281] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774009, 'name': CreateSnapshot_Task, 'duration_secs': 0.746016} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.385395] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22eaf231-49bb-423b-8119-83fb45824c8d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.388619] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1497.390976] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fe5e7b-3ed8-478c-8c9f-3c1a10abbe4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.407305] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.195s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.423734] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1497.427719] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.985s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.427950] env[63371]: DEBUG nova.objects.instance [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lazy-loading 'resources' on Instance uuid 855005ae-3b0e-4ad7-80cf-266075fc6d0f {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1497.429327] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d7da1c4-16d2-4e12-9a0b-1fa53cc469ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.452892] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Releasing lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1497.453173] env[63371]: DEBUG nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received 
event network-vif-plugged-f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1497.453382] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Acquiring lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1497.453544] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1497.453698] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.453870] env[63371]: DEBUG nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] No waiting events found dispatching network-vif-plugged-f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1497.454175] env[63371]: WARNING nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received unexpected event network-vif-plugged-f99cf773-dc88-4581-961d-63fdebbf96ff for instance with vm_state building and task_state spawning. [ 1497.454175] env[63371]: DEBUG nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-changed-f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1497.454333] env[63371]: DEBUG nova.compute.manager [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Refreshing instance network info cache due to event network-changed-f99cf773-dc88-4581-961d-63fdebbf96ff. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1497.454495] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Acquiring lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1497.460966] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1497.460966] env[63371]: value = "task-1774013" [ 1497.460966] env[63371]: _type = "Task" [ 1497.460966] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.472719] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774013, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.472719] env[63371]: INFO nova.scheduler.client.report [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Deleted allocations for instance 201a2d1e-9e2c-4c07-92be-200408874ad4 [ 1497.784719] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774012, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.807685] env[63371]: DEBUG nova.network.neutron [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updating instance_info_cache with network_info: [{"id": "2691ba66-0c30-4f84-af20-63a2d5a37564", "address": "fa:16:3e:fc:f0:88", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2691ba66-0c", "ovs_interfaceid": "2691ba66-0c30-4f84-af20-63a2d5a37564", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0fad7cfe-3cf5-4996-9d68-50d1919577c7", "address": "fa:16:3e:46:df:60", "network": {"id": "471b81c3-7351-4064-a32c-f718bdf819c9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-590329125", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fad7cfe-3c", "ovs_interfaceid": 
"0fad7cfe-3cf5-4996-9d68-50d1919577c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f99cf773-dc88-4581-961d-63fdebbf96ff", "address": "fa:16:3e:86:be:16", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99cf773-dc", "ovs_interfaceid": "f99cf773-dc88-4581-961d-63fdebbf96ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1497.962702] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1497.963894] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-fbd5307f-352b-425b-b5f5-116c2e79d90e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.983202] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774013, 'name': ReconfigVM_Task, 'duration_secs': 0.403547} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.983358] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1497.983358] env[63371]: value = "task-1774014" [ 1497.983358] env[63371]: _type = "Task" [ 1497.983358] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.983774] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a4e033a-7caf-4b37-87dd-23034872ae6e tempest-ServersAdminNegativeTestJSON-699368914 tempest-ServersAdminNegativeTestJSON-699368914-project-member] Lock "201a2d1e-9e2c-4c07-92be-200408874ad4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.178s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1497.984795] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1497.985471] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fe98db69-a1fd-4ef6-9a02-0466c1bbb15f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.000790] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774014, 'name': CloneVM_Task} progress is 11%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.005115] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1498.005115] env[63371]: value = "task-1774015" [ 1498.005115] env[63371]: _type = "Task" [ 1498.005115] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.045870] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71edb401-b664-44d1-9aa0-3b156eb85a77 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.060255] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-307ebae5-524a-43ff-89b0-0356a17f48e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.094033] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e064ad7-0dfb-4070-b27c-b94f2cc1f831 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.102813] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9383297-e948-4b70-aecd-878529e9cab1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.118192] env[63371]: DEBUG nova.compute.provider_tree [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.287582] env[63371]: DEBUG oslo_vmware.api [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774012, 'name': RemoveSnapshot_Task, 'duration_secs': 0.682545} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.287582] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1498.287582] env[63371]: INFO nova.compute.manager [None req-aec34fc3-ae9c-4aed-8fd4-b73671aab243 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Took 16.15 seconds to snapshot the instance on the hypervisor. 
[ 1498.313023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Releasing lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1498.313023] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Instance network_info: |[{"id": "2691ba66-0c30-4f84-af20-63a2d5a37564", "address": "fa:16:3e:fc:f0:88", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2691ba66-0c", "ovs_interfaceid": "2691ba66-0c30-4f84-af20-63a2d5a37564", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0fad7cfe-3cf5-4996-9d68-50d1919577c7", "address": "fa:16:3e:46:df:60", "network": {"id": "471b81c3-7351-4064-a32c-f718bdf819c9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-590329125", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fad7cfe-3c", "ovs_interfaceid": "0fad7cfe-3cf5-4996-9d68-50d1919577c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f99cf773-dc88-4581-961d-63fdebbf96ff", "address": "fa:16:3e:86:be:16", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99cf773-dc", "ovs_interfaceid": "f99cf773-dc88-4581-961d-63fdebbf96ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1498.313023] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Acquired lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1498.313023] env[63371]: DEBUG nova.network.neutron [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Refreshing network info cache for port f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1498.313362] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:f0:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2691ba66-0c30-4f84-af20-63a2d5a37564', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:46:df:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0d2101e-2d93-4310-a242-af2d9ecdaf9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0fad7cfe-3cf5-4996-9d68-50d1919577c7', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:be:16', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f99cf773-dc88-4581-961d-63fdebbf96ff', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1498.324124] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Creating folder: Project (6919de51a2ef456db7a25d4cec1e26ad). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1498.328149] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4556ad0c-cb95-4de3-b876-7a1f190b32e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.336198] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Created folder: Project (6919de51a2ef456db7a25d4cec1e26ad) in parent group-v368199. 
[ 1498.336198] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Creating folder: Instances. Parent ref: group-v368335. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1498.336354] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-866183f7-d798-4e3b-87e5-eeb08f7cdf81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.346488] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Created folder: Instances in parent group-v368335. [ 1498.346761] env[63371]: DEBUG oslo.service.loopingcall [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1498.347245] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1498.347461] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85aa54b0-e32c-40aa-92b3-f9028c2a4ffa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.370331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.370331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.370331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.370331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.370331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.371318] env[63371]: INFO nova.compute.manager [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Terminating instance [ 1498.374587] env[63371]: DEBUG nova.compute.manager [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1498.375023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1498.376357] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4dd6486-b426-44e5-a7c6-4d886d8fb218 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.379903] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1498.379903] env[63371]: value = "task-1774018" [ 1498.379903] env[63371]: _type = "Task" [ 1498.379903] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.386202] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1498.387020] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48dc1638-cd68-4142-8aa4-ab6758cac91d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.392453] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774018, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.393849] env[63371]: DEBUG oslo_vmware.api [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1498.393849] env[63371]: value = "task-1774019" [ 1498.393849] env[63371]: _type = "Task" [ 1498.393849] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.401876] env[63371]: DEBUG oslo_vmware.api [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774019, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.496544] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774014, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.516533] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774015, 'name': Rename_Task, 'duration_secs': 0.304717} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.516872] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1498.517013] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2691f08e-d0bb-4548-90fb-359cc0a95925 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.523412] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1498.523412] env[63371]: value = "task-1774020" [ 1498.523412] env[63371]: _type = "Task" [ 1498.523412] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.536242] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774020, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.553983] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "36b81143-211f-4c77-854b-abe0d3f39ce4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.554354] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.554610] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "36b81143-211f-4c77-854b-abe0d3f39ce4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1498.554842] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1498.555466] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1498.557177] env[63371]: INFO nova.compute.manager [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Terminating instance [ 1498.559909] env[63371]: DEBUG nova.compute.manager [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1498.560114] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1498.561113] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16135074-1976-444f-ba0e-ca8ff06a5126 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.569825] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1498.570010] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e3fdd078-572d-4c96-afcf-090ab9740068 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.577162] env[63371]: DEBUG oslo_vmware.api [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1498.577162] env[63371]: value = "task-1774021" [ 1498.577162] env[63371]: _type = "Task" [ 1498.577162] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1498.587392] env[63371]: DEBUG oslo_vmware.api [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774021, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.621844] env[63371]: DEBUG nova.scheduler.client.report [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1498.896845] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774018, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1498.906506] env[63371]: DEBUG oslo_vmware.api [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774019, 'name': PowerOffVM_Task, 'duration_secs': 0.329697} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1498.906940] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1498.907268] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1498.907660] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af1c6d9c-6d02-49fe-a7b8-d080f214a389 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.995968] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1498.996164] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1498.996415] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleting the datastore file [datastore1] fb2ddd3e-7adc-4a34-8797-0e98fdf19379 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1499.000410] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c6066d1-0fda-4949-96cd-1d2ad77e0973 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.002697] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774014, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.008955] env[63371]: DEBUG oslo_vmware.api [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1499.008955] env[63371]: value = "task-1774023" [ 1499.008955] env[63371]: _type = "Task" [ 1499.008955] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.016085] env[63371]: DEBUG oslo_vmware.api [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774023, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.039143] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774020, 'name': PowerOnVM_Task} progress is 90%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.092526] env[63371]: DEBUG oslo_vmware.api [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774021, 'name': PowerOffVM_Task, 'duration_secs': 0.186359} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.092914] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1499.093028] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1499.093340] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01237cc5-8628-4032-a356-61945c32a5d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.128223] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.700s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.130660] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 30.610s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.153458] env[63371]: DEBUG nova.network.neutron [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updated VIF entry in instance network info cache for port f99cf773-dc88-4581-961d-63fdebbf96ff. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1499.155636] env[63371]: DEBUG nova.network.neutron [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updating instance_info_cache with network_info: [{"id": "2691ba66-0c30-4f84-af20-63a2d5a37564", "address": "fa:16:3e:fc:f0:88", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2691ba66-0c", "ovs_interfaceid": "2691ba66-0c30-4f84-af20-63a2d5a37564", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "0fad7cfe-3cf5-4996-9d68-50d1919577c7", "address": "fa:16:3e:46:df:60", "network": {"id": "471b81c3-7351-4064-a32c-f718bdf819c9", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-590329125", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fad7cfe-3c", "ovs_interfaceid": "0fad7cfe-3cf5-4996-9d68-50d1919577c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f99cf773-dc88-4581-961d-63fdebbf96ff", "address": "fa:16:3e:86:be:16", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", 
"segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99cf773-dc", "ovs_interfaceid": "f99cf773-dc88-4581-961d-63fdebbf96ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1499.171551] env[63371]: INFO nova.scheduler.client.report [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted allocations for instance 855005ae-3b0e-4ad7-80cf-266075fc6d0f [ 1499.222902] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1499.223171] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1499.225833] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1499.225833] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1499.225833] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleting the datastore file [datastore1] 36b81143-211f-4c77-854b-abe0d3f39ce4 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1499.226217] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf97460a-27b4-4218-b9ab-a400bb809fe4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.233439] env[63371]: DEBUG oslo_vmware.api [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for the task: (returnval){ [ 1499.233439] env[63371]: value = "task-1774025" [ 1499.233439] env[63371]: _type = "Task" [ 1499.233439] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.247765] env[63371]: DEBUG oslo_vmware.api [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774025, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.398657] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774018, 'name': CreateVM_Task, 'duration_secs': 0.721836} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.398833] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1499.400135] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.400448] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.400925] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1499.401329] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b392abd-ee87-47c2-a56d-1049e70d8678 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.408176] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1499.408176] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520dcc41-55fc-8c48-110e-7a9db5997e85" [ 1499.408176] env[63371]: _type = "Task" [ 1499.408176] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.421041] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520dcc41-55fc-8c48-110e-7a9db5997e85, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1499.504234] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774014, 'name': CloneVM_Task, 'duration_secs': 1.406816} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.504234] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Created linked-clone VM from snapshot [ 1499.504315] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ca6079-474c-4f97-bfdd-cd4c25817791 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.514875] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Uploading image 6287f359-692e-438d-8347-f0d2b27b0f80 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1499.522178] env[63371]: DEBUG oslo_vmware.api [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774023, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.291218} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.522427] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1499.522602] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1499.522769] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1499.522933] env[63371]: INFO nova.compute.manager [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1499.523188] env[63371]: DEBUG oslo.service.loopingcall [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.524343] env[63371]: DEBUG nova.compute.manager [-] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1499.524343] env[63371]: DEBUG nova.network.neutron [-] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1499.540984] env[63371]: DEBUG oslo_vmware.api [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774020, 'name': PowerOnVM_Task, 'duration_secs': 0.611429} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.540984] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1499.540984] env[63371]: INFO nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Took 8.85 seconds to spawn the instance on the hypervisor. [ 1499.540984] env[63371]: DEBUG nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1499.541820] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50690f67-221f-48ef-869d-3299ba4c2fe1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.546964] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1499.546964] env[63371]: value = "vm-368334" [ 1499.546964] env[63371]: _type = "VirtualMachine" [ 1499.546964] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1499.547205] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-50d5ff3b-362a-4473-9cc5-bdc506a44410 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.558273] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lease: (returnval){ [ 1499.558273] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522f92ff-a966-c0d5-20fe-e84c1314bf9e" [ 1499.558273] env[63371]: _type = "HttpNfcLease" [ 1499.558273] env[63371]: } obtained for exporting VM: (result){ [ 1499.558273] env[63371]: value = "vm-368334" [ 1499.558273] env[63371]: _type = "VirtualMachine" [ 1499.558273] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1499.558273] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the lease: (returnval){ [ 1499.558273] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522f92ff-a966-c0d5-20fe-e84c1314bf9e" [ 1499.558273] env[63371]: _type = "HttpNfcLease" [ 1499.558273] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1499.570753] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1499.570753] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522f92ff-a966-c0d5-20fe-e84c1314bf9e" [ 1499.570753] env[63371]: _type = "HttpNfcLease" [ 1499.570753] env[63371]: } is initializing. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1499.659453] env[63371]: DEBUG oslo_concurrency.lockutils [req-3ae12814-e1a6-4d10-8ebd-cbd835eeaeb8 req-436c5967-98b5-402a-889e-4d8e04d2f28d service nova] Releasing lock "refresh_cache-f8119ade-7018-4ad8-82fe-baa0a6753c64" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.685897] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609244ee-cc8e-4716-b3d9-80157202d33d tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "855005ae-3b0e-4ad7-80cf-266075fc6d0f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.794s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1499.726300] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1499.744920] env[63371]: DEBUG oslo_vmware.api [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Task: {'id': task-1774025, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.36188} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.745120] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1499.745308] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1499.745483] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1499.745650] env[63371]: INFO nova.compute.manager [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1499.745890] env[63371]: DEBUG oslo.service.loopingcall [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1499.746088] env[63371]: DEBUG nova.compute.manager [-] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1499.746183] env[63371]: DEBUG nova.network.neutron [-] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1499.921744] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520dcc41-55fc-8c48-110e-7a9db5997e85, 'name': SearchDatastore_Task, 'duration_secs': 0.024095} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1499.922196] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1499.922322] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1499.922528] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1499.922671] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1499.922878] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1499.923154] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-86de98df-e8b8-4677-a6df-4a04263b8781 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.933184] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1499.933379] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1499.934230] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05c99b54-7dcb-40f6-99ca-d90666a8b77f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.941542] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1499.941542] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5297d2e9-4286-2605-9fae-8f9dec6ba89d" [ 1499.941542] env[63371]: _type = "Task" [ 1499.941542] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1499.952149] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5297d2e9-4286-2605-9fae-8f9dec6ba89d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.075535] env[63371]: INFO nova.compute.manager [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Took 40.27 seconds to build instance. [ 1500.078026] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1500.078026] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522f92ff-a966-c0d5-20fe-e84c1314bf9e" [ 1500.078026] env[63371]: _type = "HttpNfcLease" [ 1500.078026] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1500.078518] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1500.078518] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522f92ff-a966-c0d5-20fe-e84c1314bf9e" [ 1500.078518] env[63371]: _type = "HttpNfcLease" [ 1500.078518] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1500.079238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7110824e-8825-4fa2-8527-fc8629409363 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.086609] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f452d8-7d4e-4261-4784-ca74218d81ce/disk-0.vmdk from lease info. 
{{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1500.086786] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f452d8-7d4e-4261-4784-ca74218d81ce/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1500.183020] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 47c1c242-d190-4523-8033-307c5a9b7535 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.183231] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 33cf00ea-3195-41cf-9b7a-a8e64496a122 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1500.183355] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e4608e3c-7083-42fa-b88c-8ee007ef7f60 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.183482] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e0369f27-68ea-49c4-8524-3dbbb3cde96e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.183604] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e912c210-3ae1-47ce-b9cd-afebf6195606 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.183713] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 76c861a7-30f2-40f4-b723-7912975f36f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.183818] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.183950] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance dc6ef0a7-1744-4b90-b385-913cb796f7d0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1500.184082] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1500.184217] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 44cc8606-24f5-4f6b-b96f-3559c9c3f06e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.184344] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e8bd5802-d2ff-4348-92d4-c23277f4eaeb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.184879] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance fb2ddd3e-7adc-4a34-8797-0e98fdf19379 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.184879] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 36b81143-211f-4c77-854b-abe0d3f39ce4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.184879] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 50d5eac1-0752-4089-948c-b04439df6f6c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1500.184879] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance be37eb1c-8582-4446-afd6-ae11a8cadf95 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1500.186191] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 64fc862c-a755-4cac-997b-7a8328638269 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1500.186191] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance dcf8063b-56eb-439c-bee5-139a1e157714 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1500.186191] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.186191] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance with task_state "deleting" is not being actively managed by this compute host but has allocations referencing this compute node (c079ebb1-2fa2-4df9-bdab-118e305653c1): {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocations during the task state transition. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1780}} [ 1500.186191] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e6cd62ce-f6d2-4e5b-acbc-7527a94e0932 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.186470] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 594ff846-8e3e-4882-8ddc-41f824a77a5c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.186470] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance b523486c-adae-4322-80be-1f3bf33ca192 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.186540] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance f8119ade-7018-4ad8-82fe-baa0a6753c64 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.186620] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1500.191314] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-535ea6bb-28b9-4472-b7b7-a4ef442dac6d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.227575] env[63371]: DEBUG nova.compute.manager [req-7bba1b98-41f9-439c-911e-75a52534c52f req-06b6fe47-2874-48b9-b02b-e1806049c9e0 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Received event network-vif-deleted-d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1500.227775] env[63371]: INFO nova.compute.manager [req-7bba1b98-41f9-439c-911e-75a52534c52f req-06b6fe47-2874-48b9-b02b-e1806049c9e0 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Neutron deleted interface d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9; detaching it from the instance and deleting it from the info cache [ 1500.227947] env[63371]: DEBUG nova.network.neutron [req-7bba1b98-41f9-439c-911e-75a52534c52f req-06b6fe47-2874-48b9-b02b-e1806049c9e0 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.252037] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1500.406014] env[63371]: DEBUG nova.network.neutron [-] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.457226] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5297d2e9-4286-2605-9fae-8f9dec6ba89d, 'name': SearchDatastore_Task, 'duration_secs': 0.032676} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.457226] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec539232-5788-4dcc-9c45-8b2ee96a5056 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.466880] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1500.466880] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52793717-6c47-1404-d387-5060e4de6ee3" [ 1500.466880] env[63371]: _type = "Task" [ 1500.466880] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.478897] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52793717-6c47-1404-d387-5060e4de6ee3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.579606] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fc4d865-beff-49d4-88e2-87d878da2e12 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.136s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1500.672444] env[63371]: DEBUG nova.network.neutron [-] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.692797] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1500.733130] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aafe4311-98ec-4926-bc05-3585727ae3f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.743059] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35acf661-79fc-4bc0-8b96-2a2b819998d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.782028] env[63371]: DEBUG nova.compute.manager [req-7bba1b98-41f9-439c-911e-75a52534c52f req-06b6fe47-2874-48b9-b02b-e1806049c9e0 service nova] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Detach interface failed, port_id=d4ac9418-864a-4adf-ab92-bb5c3dbb8ec9, reason: Instance fb2ddd3e-7adc-4a34-8797-0e98fdf19379 could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1500.909352] env[63371]: INFO nova.compute.manager [-] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Took 1.39 seconds to deallocate network for instance. [ 1500.981065] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52793717-6c47-1404-d387-5060e4de6ee3, 'name': SearchDatastore_Task, 'duration_secs': 0.014696} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.981065] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1500.982025] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] f8119ade-7018-4ad8-82fe-baa0a6753c64/f8119ade-7018-4ad8-82fe-baa0a6753c64.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1500.982704] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ef9aaecd-2366-45ab-868e-ef0ac7e2bc7a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.993978] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1500.993978] env[63371]: value = "task-1774027" [ 1500.993978] env[63371]: _type = "Task" [ 1500.993978] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.004385] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774027, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.179182] env[63371]: INFO nova.compute.manager [-] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Took 1.43 seconds to deallocate network for instance. [ 1501.196278] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 6df9af10-0053-4696-920a-10ab2af67ef5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1501.205147] env[63371]: DEBUG nova.compute.manager [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1501.207016] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdcefffc-4952-45b3-99dc-220a53fa3fca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.418718] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.506580] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774027, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489281} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1501.506949] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] f8119ade-7018-4ad8-82fe-baa0a6753c64/f8119ade-7018-4ad8-82fe-baa0a6753c64.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1501.507340] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1501.507658] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1af9c49-8f74-4a0b-bad7-199a26025842 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.514197] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1501.514197] env[63371]: value = "task-1774028" [ 1501.514197] env[63371]: _type = "Task" [ 1501.514197] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1501.523138] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774028, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.691110] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.703725] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 195de525-1081-4db6-acf3-04a6d3eb142f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1501.719354] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1501.719646] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1501.723960] env[63371]: INFO nova.compute.manager [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] instance snapshotting [ 1501.725227] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-978334eb-7332-4a7f-b660-15fa4239f2f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1501.749682] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2465b541-d223-4c45-87c9-cd888f6a3b69 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.027959] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774028, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091706} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1502.027959] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1502.028953] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095efcaa-3292-4f71-8aa1-6f744bb6a4ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.063547] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] f8119ade-7018-4ad8-82fe-baa0a6753c64/f8119ade-7018-4ad8-82fe-baa0a6753c64.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1502.064487] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d081a2cf-0930-4ab4-9af9-4c3ca7fe7b0a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.090084] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1502.090084] env[63371]: value = "task-1774029" [ 1502.090084] env[63371]: _type = "Task" [ 1502.090084] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.100549] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774029, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.210802] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 0e2c8ced-198f-43be-9d41-703a7c590df4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1502.224349] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1502.264064] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1502.264403] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7686bc94-d1f2-4171-8bed-4ea42872dee4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1502.274781] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1502.274781] env[63371]: value = "task-1774030" [ 1502.274781] env[63371]: _type = "Task" [ 1502.274781] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1502.286118] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774030, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.288546] env[63371]: DEBUG nova.compute.manager [req-86c3a208-0060-4516-b687-f09aa1c67c10 req-f09a94d1-40a4-4935-a449-8193cb21a617 service nova] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Received event network-vif-deleted-767b2818-8eb3-4f76-8def-793f9f31a087 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1502.606214] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774029, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1502.715776] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 574121c4-c721-4d30-81ec-3f2310a7b6d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1502.750980] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1502.790668] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774030, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.102118] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774029, 'name': ReconfigVM_Task, 'duration_secs': 0.523414} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.102462] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Reconfigured VM instance instance-00000030 to attach disk [datastore1] f8119ade-7018-4ad8-82fe-baa0a6753c64/f8119ade-7018-4ad8-82fe-baa0a6753c64.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1503.103219] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54cf3a39-0100-43c0-a428-bb8cf91edc58 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.110178] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1503.110178] env[63371]: value = "task-1774031" [ 1503.110178] env[63371]: _type = "Task" [ 1503.110178] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.125504] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774031, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.219422] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1503.288209] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774030, 'name': CreateSnapshot_Task, 'duration_secs': 0.7318} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.288209] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1503.288583] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74827d2-79ae-48d2-a6f8-98b08e279ce2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.439016] env[63371]: DEBUG nova.compute.manager [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Stashing vm_state: active {{(pid=63371) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1503.630133] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774031, 'name': Rename_Task, 'duration_secs': 0.254779} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1503.630133] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1503.630133] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-610b85e7-275e-4efe-ac5e-b39c717b020f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.638810] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1503.638810] env[63371]: value = "task-1774032" [ 1503.638810] env[63371]: _type = "Task" [ 1503.638810] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.651034] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774032, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.723355] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 1c93487b-6d8f-424d-8b95-10bfb894c609 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1503.810945] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1503.811982] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-95bb3226-4b50-41ee-86b4-5c762acc5506 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1503.821400] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1503.821400] env[63371]: value = "task-1774033" [ 1503.821400] env[63371]: _type = "Task" [ 1503.821400] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1503.830323] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774033, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1503.967474] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1504.151431] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774032, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.229271] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance d6bc618e-33c9-4b45-b79f-afe6811acd4e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1504.229271] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1504.229271] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1504.333859] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774033, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.655394] env[63371]: DEBUG oslo_vmware.api [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774032, 'name': PowerOnVM_Task, 'duration_secs': 0.778069} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1504.655857] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1504.656192] env[63371]: INFO nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Took 16.59 seconds to spawn the instance on the hypervisor. [ 1504.656482] env[63371]: DEBUG nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1504.657545] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fca5c4-e532-48da-8a9d-560bc91d1dc9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.832246] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16510582-f232-4381-b811-b24fb7f91370 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.838595] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774033, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1504.844832] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf4dac19-cf5b-4eda-a8e6-b0daa5dac1ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.878282] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b8ba96-b63a-4f3e-ae80-7abc11307281 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.886120] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8615f2c3-b522-4550-846d-916bf17a19db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1504.901410] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1505.180831] env[63371]: INFO nova.compute.manager [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Took 46.24 seconds to build instance. [ 1505.335529] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774033, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.404684] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1505.683235] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba4ef6bf-7d3c-49c1-b416-869bc5569a35 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.806s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.838888] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774033, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1505.914644] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1505.914994] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.784s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.915259] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.367s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.916826] env[63371]: INFO nova.compute.claims [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1506.017979] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "f8119ade-7018-4ad8-82fe-baa0a6753c64" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.018228] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.018386] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1506.018602] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.018833] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock 
"f8119ade-7018-4ad8-82fe-baa0a6753c64-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1506.021505] env[63371]: INFO nova.compute.manager [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Terminating instance [ 1506.023420] env[63371]: DEBUG nova.compute.manager [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1506.023568] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1506.024409] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-614f0d94-8db6-4521-a1ef-c4673388b84f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.032266] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1506.032501] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11d48e4e-c6f6-4426-89ce-9e41d68385ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.043251] env[63371]: DEBUG oslo_vmware.api [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1506.043251] env[63371]: value = "task-1774037" [ 1506.043251] env[63371]: _type = "Task" [ 1506.043251] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.050267] env[63371]: DEBUG oslo_vmware.api [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774037, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.339421] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774033, 'name': CloneVM_Task, 'duration_secs': 2.379382} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.339736] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Created linked-clone VM from snapshot [ 1506.340516] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8e30946-6674-4042-92f2-65feff37720c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.348347] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Uploading image c308ba9c-513a-4658-a60c-4dcff19c7679 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1506.370790] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1506.370790] env[63371]: value = "vm-368339" [ 1506.370790] env[63371]: _type = "VirtualMachine" [ 1506.370790] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1506.371191] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-472d9192-83f0-4f28-ab14-5f0eac138da6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.378732] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lease: (returnval){ [ 1506.378732] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f849d7-ba90-75fb-976e-8712adefe706" [ 1506.378732] env[63371]: _type = "HttpNfcLease" [ 1506.378732] env[63371]: } obtained for exporting VM: (result){ [ 1506.378732] env[63371]: value = "vm-368339" [ 1506.378732] env[63371]: _type = "VirtualMachine" [ 1506.378732] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1506.378987] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the lease: (returnval){ [ 1506.378987] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f849d7-ba90-75fb-976e-8712adefe706" [ 1506.378987] env[63371]: _type = "HttpNfcLease" [ 1506.378987] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1506.385469] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1506.385469] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f849d7-ba90-75fb-976e-8712adefe706" [ 1506.385469] env[63371]: _type = "HttpNfcLease" [ 1506.385469] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1506.552429] env[63371]: DEBUG oslo_vmware.api [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774037, 'name': PowerOffVM_Task, 'duration_secs': 0.263944} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1506.552721] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1506.552920] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1506.553245] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbf0959a-5c32-4118-a6d9-cda2708ebc4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.729258] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1506.729514] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1506.729806] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Deleting the datastore file [datastore1] f8119ade-7018-4ad8-82fe-baa0a6753c64 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1506.730311] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e799907-0961-4780-9b21-6f8fec26a728 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.738285] env[63371]: DEBUG oslo_vmware.api [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1506.738285] env[63371]: value = "task-1774040" [ 1506.738285] env[63371]: _type = "Task" [ 1506.738285] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1506.747059] env[63371]: DEBUG oslo_vmware.api [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774040, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1506.887014] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1506.887014] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f849d7-ba90-75fb-976e-8712adefe706" [ 1506.887014] env[63371]: _type = "HttpNfcLease" [ 1506.887014] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1506.888543] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1506.888543] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f849d7-ba90-75fb-976e-8712adefe706" [ 1506.888543] env[63371]: _type = "HttpNfcLease" [ 1506.888543] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1506.888543] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e73fb79-7873-4e5e-a224-5b5d34372325 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1506.896982] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523717c8-0d3b-e5ae-7e1a-8948218acaf0/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1506.897374] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523717c8-0d3b-e5ae-7e1a-8948218acaf0/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1507.014720] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bcbf705a-70d3-4e51-b7c7-787e2a2d7e97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.249980] env[63371]: DEBUG oslo_vmware.api [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774040, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226555} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1507.252718] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1507.252718] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1507.252718] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1507.253203] env[63371]: INFO nova.compute.manager [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1507.253203] env[63371]: DEBUG oslo.service.loopingcall [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1507.255830] env[63371]: DEBUG nova.compute.manager [-] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1507.255945] env[63371]: DEBUG nova.network.neutron [-] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1507.496813] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90461a01-9620-464f-a7f9-54e3752ca5d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.506077] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41a98a3-9b29-459f-8af2-c0c61f84c171 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.544298] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e41b24d-0ce2-4d84-ae3a-4e30e4ac8b31 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.552363] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3efe6e1-cc09-45b4-8693-ea611acf2a7f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.569560] env[63371]: DEBUG nova.compute.provider_tree [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 
tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1507.625274] env[63371]: DEBUG nova.compute.manager [req-63fdaabf-8f6a-439b-8f86-6eea59a9da98 req-81ab80e6-ba28-47d7-9b69-774e08a232f2 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-vif-deleted-0fad7cfe-3cf5-4996-9d68-50d1919577c7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1507.625694] env[63371]: INFO nova.compute.manager [req-63fdaabf-8f6a-439b-8f86-6eea59a9da98 req-81ab80e6-ba28-47d7-9b69-774e08a232f2 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Neutron deleted interface 0fad7cfe-3cf5-4996-9d68-50d1919577c7; detaching it from the instance and deleting it from the info cache [ 1507.626095] env[63371]: DEBUG nova.network.neutron [req-63fdaabf-8f6a-439b-8f86-6eea59a9da98 req-81ab80e6-ba28-47d7-9b69-774e08a232f2 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updating instance_info_cache with network_info: [{"id": "2691ba66-0c30-4f84-af20-63a2d5a37564", "address": "fa:16:3e:fc:f0:88", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2691ba66-0c", "ovs_interfaceid": "2691ba66-0c30-4f84-af20-63a2d5a37564", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f99cf773-dc88-4581-961d-63fdebbf96ff", "address": "fa:16:3e:86:be:16", "network": {"id": "47a7ac11-9af0-4cc4-97b8-7b56496abf49", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79330910", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.52", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33bcfd29-ad69-41ad-8e7f-55c1a3cf2dce", "external-id": "nsx-vlan-transportzone-725", "segmentation_id": 725, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf99cf773-dc", "ovs_interfaceid": "f99cf773-dc88-4581-961d-63fdebbf96ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.074136] env[63371]: DEBUG nova.scheduler.client.report [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1508.130883] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf33477b-e01d-46a4-bfce-b78ece252fb5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.143519] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985d94b3-5996-457b-a880-4f646d898c8d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.180767] env[63371]: DEBUG nova.compute.manager [req-63fdaabf-8f6a-439b-8f86-6eea59a9da98 req-81ab80e6-ba28-47d7-9b69-774e08a232f2 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Detach interface failed, port_id=0fad7cfe-3cf5-4996-9d68-50d1919577c7, reason: Instance f8119ade-7018-4ad8-82fe-baa0a6753c64 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1508.374708] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f452d8-7d4e-4261-4784-ca74218d81ce/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1508.375810] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52a08e7-70a8-4a15-8723-6aa74bee3d21 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.385589] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f452d8-7d4e-4261-4784-ca74218d81ce/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1508.387655] env[63371]: ERROR oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f452d8-7d4e-4261-4784-ca74218d81ce/disk-0.vmdk due to incomplete transfer. 
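
The recurring "Waiting for the task ... / progress is N% / completed successfully" records above (ReconfigVM_Task, CreateSnapshot_Task, CloneVM_Task, PowerOnVM_Task, DeleteDatastoreFile_Task) all come from the oslo.vmware task-polling loop whose file paths the log itself prints (wait_for_task and _poll_task in oslo_vmware/api.py). A minimal sketch of that polling pattern, with a hypothetical get_task_info(task_ref) accessor standing in for the real PropertyCollector lookup:

    import time

    def wait_for_task(task_ref, get_task_info, poll_interval=0.5):
        # Poll the task until it reaches a terminal state, mirroring the
        # "progress is N%" DEBUG records emitted for each vCenter task above.
        while True:
            info = get_task_info(task_ref)        # hypothetical accessor for the TaskInfo object
            if info.state == 'success':
                return info.result                # e.g. the reconfigured or cloned VM reference
            if info.state == 'error':
                raise RuntimeError(info.error)    # the real session re-raises this as a typed fault
            # still queued or running: report progress and poll again
            print("Task %s: progress is %s%%" % (task_ref, info.progress or 0))
            time.sleep(poll_interval)

In the driver this loop runs inside VMwareAPISession.wait_for_task, which is what every "Waiting for the task" record in this log corresponds to; the sketch only captures the poll-until-terminal-state shape, not the session's fault translation.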
[ 1508.387655] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-57a8c1fa-123c-40be-914c-86da8c5ce13c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.396806] env[63371]: DEBUG oslo_vmware.rw_handles [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f452d8-7d4e-4261-4784-ca74218d81ce/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1508.396806] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Uploaded image 6287f359-692e-438d-8347-f0d2b27b0f80 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1508.399410] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1508.399762] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d6b462e6-8270-4d05-9bee-103a9f613852 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1508.406921] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1508.406921] env[63371]: value = "task-1774041" [ 1508.406921] env[63371]: _type = "Task" [ 1508.406921] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1508.417395] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774041, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1508.582307] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.665s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1508.583117] env[63371]: DEBUG nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1508.589845] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.810s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1508.589845] env[63371]: INFO nova.compute.claims [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1508.835152] env[63371]: DEBUG nova.network.neutron [-] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1508.918555] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774041, 'name': Destroy_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.095963] env[63371]: DEBUG nova.compute.utils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1509.097589] env[63371]: DEBUG nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1509.098182] env[63371]: DEBUG nova.network.neutron [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1509.173672] env[63371]: DEBUG nova.policy [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f85b2454eed34665b92a1ebc087353c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f98ab0107f5040139ef8be7c3ae22207', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1509.340609] env[63371]: INFO nova.compute.manager [-] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Took 2.08 seconds to deallocate network for instance. 
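
The image-upload records in the same stretch (creating an HttpNfcLease for export, "Found VMDK URL ... from lease info", "Opening URL ... for reading", the periodic HttpNfcLeaseProgress calls, and the "Aborting lease ... due to incomplete transfer" error) follow vSphere's HttpNfcLease export protocol. A rough sketch of the read side, assuming the lease is already in the ready state and that its device URLs are available as plain URL strings; the helper and its arguments are illustrative, not oslo.vmware's exact objects:

    import urllib.request

    def read_exported_vmdk(device_urls, chunk_size=64 * 1024):
        # Pick the first VMDK URL advertised by the lease, as the
        # "Found VMDK URL ... from lease info" record does, then stream it.
        vmdk_url = next(url for url in device_urls if url.endswith('.vmdk'))
        total = 0
        # hypothetical: the real handle also carries vCenter certificates/cookies
        with urllib.request.urlopen(vmdk_url) as resp:
            while True:
                chunk = resp.read(chunk_size)
                if not chunk:
                    break
                total += len(chunk)   # the driver feeds these chunks into the Glance upload
        return total

Nova streams these bytes into Glance as a stream-optimized image; if the reader stops before the export finishes, the lease has to be aborted, which is exactly the "Aborting lease ... due to incomplete transfer" ERROR record above.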
[ 1509.399899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.399899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.399899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "594ff846-8e3e-4882-8ddc-41f824a77a5c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.399899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1509.399899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1509.402103] env[63371]: INFO nova.compute.manager [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Terminating instance [ 1509.404381] env[63371]: DEBUG nova.compute.manager [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1509.404703] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1509.406036] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9504eaf-2ba3-4fd6-b288-827fe74bc072 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.419724] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1509.424204] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a653658-7066-4c00-8d01-b5c81ba710f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.426458] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774041, 'name': Destroy_Task, 'duration_secs': 0.643534} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1509.427271] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Destroyed the VM [ 1509.427716] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1509.428498] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-687c5f4f-9988-4544-aadb-bf2d9600b9f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.437123] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1509.437123] env[63371]: value = "task-1774044" [ 1509.437123] env[63371]: _type = "Task" [ 1509.437123] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.445377] env[63371]: DEBUG oslo_vmware.api [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774044, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.561314] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1509.561314] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1509.561314] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleting the datastore file [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1509.561314] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0193c44d-2d9a-4f7b-82d1-eeff6b71cf01 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1509.572394] env[63371]: DEBUG oslo_vmware.api [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1509.572394] env[63371]: value = "task-1774045" [ 1509.572394] env[63371]: _type = "Task" [ 1509.572394] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1509.580676] env[63371]: DEBUG oslo_vmware.api [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774045, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1509.606962] env[63371]: DEBUG nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1509.686377] env[63371]: DEBUG nova.network.neutron [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Successfully created port: 01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1509.699568] env[63371]: DEBUG nova.compute.manager [req-c59fa24b-c66c-4ff3-ae85-f3dd667b400c req-d66757b3-747a-4a3b-8c31-12977b0032f5 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-vif-deleted-f99cf773-dc88-4581-961d-63fdebbf96ff {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1509.699568] env[63371]: DEBUG nova.compute.manager [req-c59fa24b-c66c-4ff3-ae85-f3dd667b400c req-d66757b3-747a-4a3b-8c31-12977b0032f5 service nova] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Received event network-vif-deleted-2691ba66-0c30-4f84-af20-63a2d5a37564 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1509.852347] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1509.953751] env[63371]: DEBUG nova.compute.utils [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Cleaning up image 6287f359-692e-438d-8347-f0d2b27b0f80 {{(pid=63371) delete_image /opt/stack/nova/nova/compute/utils.py:1322}} [ 1510.083674] env[63371]: WARNING nova.virt.vmwareapi.vmops [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] In vmwareapi:vmops:_destroy_instance, exception while deleting the VM contents from the disk: oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Traceback (most recent call last): [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1113, in _destroy_instance [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] ds_util.file_delete(self._session, [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/nova/nova/virt/vmwareapi/ds_util.py", line 219, in file_delete [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] session._wait_for_task(file_delete_task) [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] return self.wait_for_task(task_ref) [ 
1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] return evt.wait() [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] result = hub.switch() [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] return self.greenlet.switch() [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] self.f(*self.args, **self.kw) [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] raise exceptions.translate_fault(task_info.error) [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore1] 594ff846-8e3e-4882-8ddc-41f824a77a5c [ 1510.083674] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] [ 1510.084417] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1510.084607] env[63371]: INFO nova.compute.manager [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Took 0.68 seconds to destroy the instance on the hypervisor. [ 1510.084810] env[63371]: DEBUG oslo.service.loopingcall [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1510.087712] env[63371]: DEBUG nova.compute.manager [-] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1510.088199] env[63371]: DEBUG nova.network.neutron [-] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1510.161512] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1510.161778] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1510.163433] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edb01281-84ed-4d40-b60c-914566edefd9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.172875] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f22661a4-a50c-4b94-8762-b56f5e1f8671 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.205580] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53369d3d-2711-4b91-9183-aa37784b8df8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.215719] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3e43bb-a94a-497c-9a06-530f52219262 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.232127] env[63371]: DEBUG nova.compute.provider_tree [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1510.619569] env[63371]: DEBUG nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1510.656147] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1510.656484] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1510.656566] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1510.656712] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1510.656910] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1510.657010] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1510.657237] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1510.657424] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1510.657559] env[63371]: DEBUG nova.virt.hardware [None 
req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1510.657728] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1510.658113] env[63371]: DEBUG nova.virt.hardware [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1510.658791] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7882b04-39ba-4804-9882-4d65f1e49bbd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.666793] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1510.670585] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0362d10b-e4e3-40a3-8d55-f53a69faba5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1510.735569] env[63371]: DEBUG nova.scheduler.client.report [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1510.907472] env[63371]: DEBUG nova.network.neutron [-] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1511.195013] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.240935] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.241941] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1511.244966] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.087s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.245249] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.248716] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.620s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.249606] env[63371]: INFO nova.compute.claims [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1511.276579] env[63371]: INFO nova.scheduler.client.report [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Deleted allocations for instance 50d5eac1-0752-4089-948c-b04439df6f6c [ 1511.414384] env[63371]: INFO nova.compute.manager [-] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Took 1.32 seconds to deallocate network for instance. 
[ 1511.484486] env[63371]: DEBUG oslo_concurrency.lockutils [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.514932] env[63371]: DEBUG nova.network.neutron [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Successfully updated port: 01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1511.734570] env[63371]: DEBUG nova.compute.manager [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Received event network-vif-deleted-3d978143-a770-4100-a97a-b0d9503712e0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1511.734965] env[63371]: DEBUG nova.compute.manager [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Received event network-vif-plugged-01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1511.735154] env[63371]: DEBUG oslo_concurrency.lockutils [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] Acquiring lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1511.735427] env[63371]: DEBUG oslo_concurrency.lockutils [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1511.735665] env[63371]: DEBUG oslo_concurrency.lockutils [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.735892] env[63371]: DEBUG nova.compute.manager [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] No waiting events found dispatching network-vif-plugged-01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1511.736138] env[63371]: WARNING nova.compute.manager [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Received unexpected event network-vif-plugged-01b878e5-651e-49f1-959f-7da17291c0bc for instance with vm_state building and task_state spawning. 
[ 1511.736371] env[63371]: DEBUG nova.compute.manager [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Received event network-changed-01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1511.736583] env[63371]: DEBUG nova.compute.manager [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Refreshing instance network info cache due to event network-changed-01b878e5-651e-49f1-959f-7da17291c0bc. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1511.736845] env[63371]: DEBUG oslo_concurrency.lockutils [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] Acquiring lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1511.737081] env[63371]: DEBUG oslo_concurrency.lockutils [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] Acquired lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1511.737296] env[63371]: DEBUG nova.network.neutron [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Refreshing network info cache for port 01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1511.755869] env[63371]: DEBUG nova.compute.utils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1511.759041] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1511.759041] env[63371]: DEBUG nova.network.neutron [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1511.783359] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5c0c4847-4439-4a4f-9138-c17e37e4a716 tempest-ServerDiagnosticsV248Test-383719471 tempest-ServerDiagnosticsV248Test-383719471-project-member] Lock "50d5eac1-0752-4089-948c-b04439df6f6c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.392s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1511.863141] env[63371]: DEBUG nova.policy [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ef97c1a9a174c1888972e6f281eecbe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2601d597b4d64481ace490d56d1056a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1511.920090] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1512.021889] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1512.266425] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1512.297458] env[63371]: DEBUG nova.network.neutron [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1512.470727] env[63371]: DEBUG nova.network.neutron [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1512.511876] env[63371]: DEBUG nova.network.neutron [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Successfully created port: a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1512.920519] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b5c3f1-20e4-44d3-ade1-74e4e2d4e105 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.929913] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e1c9d6-b030-4de0-8ac0-e3830fea01f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.975103] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d372f768-3759-4d59-90d7-146fb6014f5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1512.979399] env[63371]: DEBUG oslo_concurrency.lockutils [req-5c06b068-2445-4fd3-8ee2-75a81db30bfe req-1633258b-ca30-4bbd-be14-19ed03e606d7 service nova] Releasing lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1512.979890] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1512.980077] env[63371]: DEBUG nova.network.neutron [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1512.987501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42dca31b-b230-49d0-b73c-01983b34c1c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.007068] env[63371]: DEBUG nova.compute.provider_tree [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1513.293874] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 
tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1513.329280] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1513.329951] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1513.329951] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1513.330108] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1513.330314] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1513.330509] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1513.330993] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1513.330993] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1513.331212] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1513.331425] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1513.331653] env[63371]: DEBUG nova.virt.hardware [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1513.333370] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57e5eef-8bac-413c-ae3a-8c87c7d9bc76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.345797] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1a1405-7401-4160-bdac-af25c81c5fe5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1513.512117] env[63371]: DEBUG nova.scheduler.client.report [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1513.548480] env[63371]: DEBUG nova.network.neutron [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1513.790980] env[63371]: DEBUG nova.network.neutron [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [{"id": "01b878e5-651e-49f1-959f-7da17291c0bc", "address": "fa:16:3e:b7:c4:0c", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b878e5-65", "ovs_interfaceid": "01b878e5-651e-49f1-959f-7da17291c0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1514.020168] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.770s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.020168] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1514.022615] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.259s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.022815] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.025083] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.503s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.025314] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.027121] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.069s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.028785] env[63371]: INFO nova.compute.claims [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1514.065323] env[63371]: INFO nova.scheduler.client.report [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted allocations for instance 64fc862c-a755-4cac-997b-7a8328638269 [ 1514.071135] env[63371]: INFO nova.scheduler.client.report [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Deleted allocations for instance be37eb1c-8582-4446-afd6-ae11a8cadf95 [ 1514.296153] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1514.296512] env[63371]: DEBUG nova.compute.manager [None 
req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Instance network_info: |[{"id": "01b878e5-651e-49f1-959f-7da17291c0bc", "address": "fa:16:3e:b7:c4:0c", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b878e5-65", "ovs_interfaceid": "01b878e5-651e-49f1-959f-7da17291c0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1514.296938] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:c4:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01b878e5-651e-49f1-959f-7da17291c0bc', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1514.309225] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Creating folder: Project (f98ab0107f5040139ef8be7c3ae22207). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1514.309225] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-76b58762-966d-437f-9b30-72eccbf65d23 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.320597] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Created folder: Project (f98ab0107f5040139ef8be7c3ae22207) in parent group-v368199. [ 1514.320839] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Creating folder: Instances. Parent ref: group-v368343. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1514.321135] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-359981e7-4a4f-4b47-b833-552e52ca4c5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.330734] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Created folder: Instances in parent group-v368343. [ 1514.331176] env[63371]: DEBUG oslo.service.loopingcall [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1514.331350] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1514.331585] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8eb75b03-0d9e-400d-b135-5a00a8031336 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.354905] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1514.354905] env[63371]: value = "task-1774050" [ 1514.354905] env[63371]: _type = "Task" [ 1514.354905] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.366316] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774050, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.537454] env[63371]: DEBUG nova.compute.utils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1514.539203] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1514.539549] env[63371]: DEBUG nova.network.neutron [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1514.577235] env[63371]: DEBUG oslo_concurrency.lockutils [None req-be844fb4-ad96-425e-a000-431202869bbc tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "64fc862c-a755-4cac-997b-7a8328638269" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.115s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.584020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-12acc297-4a91-44a3-95c8-f040aefa32a3 tempest-InstanceActionsTestJSON-1503006778 tempest-InstanceActionsTestJSON-1503006778-project-member] Lock "be37eb1c-8582-4446-afd6-ae11a8cadf95" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.874s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.672031] env[63371]: DEBUG nova.policy [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b22696705ee840cb8ecd18e5abcec19c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b5c5bf80b8e64c8795da4d79d6a89150', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1514.679950] env[63371]: DEBUG nova.compute.manager [req-aa4bb03c-7f64-437a-8518-8e41ff67653f req-6e28fad2-f9ba-4f60-9cba-d42b85749a1f service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Received event network-vif-plugged-a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1514.680226] env[63371]: DEBUG oslo_concurrency.lockutils [req-aa4bb03c-7f64-437a-8518-8e41ff67653f req-6e28fad2-f9ba-4f60-9cba-d42b85749a1f service nova] Acquiring lock "6df9af10-0053-4696-920a-10ab2af67ef5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.680376] env[63371]: DEBUG oslo_concurrency.lockutils [req-aa4bb03c-7f64-437a-8518-8e41ff67653f req-6e28fad2-f9ba-4f60-9cba-d42b85749a1f service nova] Lock "6df9af10-0053-4696-920a-10ab2af67ef5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1514.680565] env[63371]: DEBUG oslo_concurrency.lockutils [req-aa4bb03c-7f64-437a-8518-8e41ff67653f req-6e28fad2-f9ba-4f60-9cba-d42b85749a1f service nova] Lock "6df9af10-0053-4696-920a-10ab2af67ef5-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1514.680752] env[63371]: DEBUG nova.compute.manager [req-aa4bb03c-7f64-437a-8518-8e41ff67653f req-6e28fad2-f9ba-4f60-9cba-d42b85749a1f service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] No waiting events found dispatching network-vif-plugged-a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1514.680933] env[63371]: WARNING nova.compute.manager [req-aa4bb03c-7f64-437a-8518-8e41ff67653f req-6e28fad2-f9ba-4f60-9cba-d42b85749a1f service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Received unexpected event network-vif-plugged-a7788c55-6aa0-4056-b8d1-cff8ad8951f7 for instance with vm_state building and task_state spawning. [ 1514.871740] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774050, 'name': CreateVM_Task, 'duration_secs': 0.428887} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1514.872376] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1514.873037] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1514.873222] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1514.874490] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1514.874490] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2905d5ce-99f1-4dda-9ede-e5c472cf7572 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1514.880612] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1514.880612] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528dac5b-0991-ced3-c1af-de66d313d0c8" [ 1514.880612] env[63371]: _type = "Task" [ 1514.880612] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1514.889922] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528dac5b-0991-ced3-c1af-de66d313d0c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1514.902140] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquiring lock "e05c7187-b4d6-481e-8bce-deb557dde6a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1514.902387] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lock "e05c7187-b4d6-481e-8bce-deb557dde6a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1515.045340] env[63371]: DEBUG nova.compute.utils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1515.290430] env[63371]: DEBUG nova.network.neutron [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Successfully updated port: a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1515.401237] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528dac5b-0991-ced3-c1af-de66d313d0c8, 'name': SearchDatastore_Task, 'duration_secs': 0.011534} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.401237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1515.401237] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1515.401237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.401237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.401592] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1515.401592] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6748be3-6683-415a-afb7-99a4737c8ce9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.412677] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1515.412677] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1515.412677] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-783695b5-7fb2-4f9c-8d96-69202bf8ef06 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.418525] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1515.418525] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5225fae1-d233-4185-f27e-410b6f6a3659" [ 1515.418525] env[63371]: _type = "Task" [ 1515.418525] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.431294] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5225fae1-d233-4185-f27e-410b6f6a3659, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1515.552652] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1515.583556] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8e2a7b-6811-4799-ad51-0a170bdd95c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.594082] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46833eda-f99f-4e04-89f9-31dc17a92374 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.639088] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cd6e38-4964-4d1c-9870-ab5fb7b3a91b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.656290] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca32e50a-c5b1-4fac-962b-0bcf41887f0b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.677137] env[63371]: DEBUG nova.compute.provider_tree [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1515.729448] env[63371]: DEBUG nova.network.neutron [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Successfully created port: 8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) 
_create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1515.793805] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1515.793805] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1515.794296] env[63371]: DEBUG nova.network.neutron [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1515.934633] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5225fae1-d233-4185-f27e-410b6f6a3659, 'name': SearchDatastore_Task, 'duration_secs': 0.011747} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1515.935683] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7691755-c290-4430-985b-c6f808fad47f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1515.945553] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1515.945553] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]529db7a3-a5bc-90a5-678c-bb171cffde01" [ 1515.945553] env[63371]: _type = "Task" [ 1515.945553] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1515.955946] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529db7a3-a5bc-90a5-678c-bb171cffde01, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.182598] env[63371]: DEBUG nova.scheduler.client.report [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1516.355455] env[63371]: DEBUG nova.network.neutron [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1516.431222] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523717c8-0d3b-e5ae-7e1a-8948218acaf0/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1516.431222] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df29b4bc-95cb-43d4-b188-1ecebe854b3e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.441451] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523717c8-0d3b-e5ae-7e1a-8948218acaf0/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1516.442293] env[63371]: ERROR oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523717c8-0d3b-e5ae-7e1a-8948218acaf0/disk-0.vmdk due to incomplete transfer. [ 1516.442293] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1621101c-cf38-4adb-bcff-f644855fbae5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.453855] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529db7a3-a5bc-90a5-678c-bb171cffde01, 'name': SearchDatastore_Task, 'duration_secs': 0.017132} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1516.456811] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1516.456811] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec/88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1516.456811] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3593896-8ff1-4e45-9df2-2c316f2d02b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.462469] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1516.462469] env[63371]: value = "task-1774052" [ 1516.462469] env[63371]: _type = "Task" [ 1516.462469] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.465716] env[63371]: DEBUG oslo_vmware.rw_handles [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523717c8-0d3b-e5ae-7e1a-8948218acaf0/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1516.466214] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Uploaded image c308ba9c-513a-4658-a60c-4dcff19c7679 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1516.467915] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1516.468626] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-de48b43b-8227-4f09-8a9d-1be0e81c191b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.477557] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774052, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.482919] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1516.482919] env[63371]: value = "task-1774053" [ 1516.482919] env[63371]: _type = "Task" [ 1516.482919] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1516.494251] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774053, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1516.571019] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1516.571019] env[63371]: DEBUG nova.network.neutron [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updating instance_info_cache with network_info: [{"id": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "address": "fa:16:3e:8d:c6:ca", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7788c55-6a", "ovs_interfaceid": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1516.597959] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:31:00Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='431901221',id=18,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-933523965',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1516.598317] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1516.598530] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1516.598772] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1516.598997] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1516.599248] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1516.599566] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1516.599726] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1516.599930] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1516.600174] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Possible 
topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1516.600395] env[63371]: DEBUG nova.virt.hardware [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1516.601840] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c146b5b-2a8c-4c06-b903-bb973c41c08e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.610958] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0419e7e5-5b4f-416d-bbc1-61ec5dbd47b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1516.702737] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.675s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.702737] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1516.706625] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.356s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.706840] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.712951] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.171s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.712951] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.715498] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.923s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.715498] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.717706] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.487s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.717706] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1516.719720] env[63371]: DEBUG 
oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.080s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1516.721856] env[63371]: INFO nova.compute.claims [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1516.769570] env[63371]: INFO nova.scheduler.client.report [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Deleted allocations for instance dcf8063b-56eb-439c-bee5-139a1e157714 [ 1516.773879] env[63371]: INFO nova.scheduler.client.report [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Deleted allocations for instance dc6ef0a7-1744-4b90-b385-913cb796f7d0 [ 1516.840989] env[63371]: INFO nova.scheduler.client.report [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleted allocations for instance 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05 [ 1516.925640] env[63371]: INFO nova.scheduler.client.report [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Deleted allocations for instance 33cf00ea-3195-41cf-9b7a-a8e64496a122 [ 1516.974892] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774052, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.003624] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774053, 'name': Destroy_Task} progress is 33%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.076337] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1517.076745] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Instance network_info: |[{"id": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "address": "fa:16:3e:8d:c6:ca", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7788c55-6a", "ovs_interfaceid": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1517.077228] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:c6:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6fb0104-186b-4288-b87e-634893f46f01', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7788c55-6aa0-4056-b8d1-cff8ad8951f7', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1517.086583] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Creating folder: Project (2601d597b4d64481ace490d56d1056a6). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1517.087291] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db58ab48-52ad-4258-b3ca-deee2c00bc5b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.098960] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Created folder: Project (2601d597b4d64481ace490d56d1056a6) in parent group-v368199. [ 1517.099111] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Creating folder: Instances. Parent ref: group-v368347. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1517.099571] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-146ed245-fe96-4c10-91f6-1d4280dc1f1f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.103575] env[63371]: DEBUG nova.compute.manager [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Received event network-changed-a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1517.103780] env[63371]: DEBUG nova.compute.manager [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Refreshing instance network info cache due to event network-changed-a7788c55-6aa0-4056-b8d1-cff8ad8951f7. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1517.103981] env[63371]: DEBUG oslo_concurrency.lockutils [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] Acquiring lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1517.104330] env[63371]: DEBUG oslo_concurrency.lockutils [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] Acquired lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1517.104399] env[63371]: DEBUG nova.network.neutron [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Refreshing network info cache for port a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1517.115123] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Created folder: Instances in parent group-v368347. [ 1517.115378] env[63371]: DEBUG oslo.service.loopingcall [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1517.115569] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1517.115784] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-887f0fcd-f779-4409-9d13-5cba55ee9cf9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.138022] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1517.138022] env[63371]: value = "task-1774056" [ 1517.138022] env[63371]: _type = "Task" [ 1517.138022] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.151429] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774056, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.229335] env[63371]: DEBUG nova.compute.utils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1517.233784] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1517.235964] env[63371]: DEBUG nova.network.neutron [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1517.293412] env[63371]: DEBUG oslo_concurrency.lockutils [None req-369dc9d0-a17e-4899-9324-735c7f59ce75 tempest-MultipleCreateTestJSON-789867973 tempest-MultipleCreateTestJSON-789867973-project-member] Lock "dcf8063b-56eb-439c-bee5-139a1e157714" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.595s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.294695] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ccac412-9711-4e37-ab21-12d5fbf982dc tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "dc6ef0a7-1744-4b90-b385-913cb796f7d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.544s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.353172] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54e5f70a-fcf3-44a8-a54b-823be151b828 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "7e463dd7-84a6-4e6d-ae8f-0860e3a20f05" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.575s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.372625] env[63371]: DEBUG nova.policy [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'acdd9325a8b0496aad20c5dbd1c37ff1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2fc50868ddcf4193beb9b3a8a37f97b4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1517.435271] env[63371]: DEBUG oslo_concurrency.lockutils [None req-034144cd-5aad-4e61-bdfc-190cdcd6d2b8 tempest-ServerRescueTestJSON-1718412976 tempest-ServerRescueTestJSON-1718412976-project-member] Lock "33cf00ea-3195-41cf-9b7a-a8e64496a122" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.369s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.472538] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774052, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57651} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.472840] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec/88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1517.473023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1517.473586] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-26b97ad5-8625-463f-bfb4-50e3fba5f549 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.480176] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1517.480176] env[63371]: value = "task-1774058" [ 1517.480176] env[63371]: _type = "Task" [ 1517.480176] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.490949] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774058, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.496414] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774053, 'name': Destroy_Task, 'duration_secs': 0.586963} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.496568] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Destroyed the VM [ 1517.496800] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1517.497065] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-378dcbcb-b406-48eb-bfb3-6cb77555c9b9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.507246] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1517.507246] env[63371]: value = "task-1774059" [ 1517.507246] env[63371]: _type = "Task" [ 1517.507246] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.520144] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774059, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.652056] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774056, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.677203] env[63371]: DEBUG nova.network.neutron [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Successfully updated port: 8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1517.738883] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1517.824514] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "76c861a7-30f2-40f4-b723-7912975f36f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.824796] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "76c861a7-30f2-40f4-b723-7912975f36f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.825010] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "76c861a7-30f2-40f4-b723-7912975f36f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1517.825206] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "76c861a7-30f2-40f4-b723-7912975f36f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1517.825378] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "76c861a7-30f2-40f4-b723-7912975f36f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1517.831122] env[63371]: INFO nova.compute.manager [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Terminating instance [ 1517.832156] env[63371]: DEBUG nova.compute.manager 
[None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1517.832480] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1517.833305] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ffed3f-8214-4933-901d-dedba76c5adf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.841046] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1517.843435] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82ef5d6c-292a-4da3-896c-d616f28fa199 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1517.852410] env[63371]: DEBUG oslo_vmware.api [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1517.852410] env[63371]: value = "task-1774060" [ 1517.852410] env[63371]: _type = "Task" [ 1517.852410] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1517.865100] env[63371]: DEBUG oslo_vmware.api [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1774060, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1517.992147] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774058, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090352} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1517.992147] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1517.993107] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c8be1d-81e4-46f4-a459-61ab53979611 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.017729] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec/88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1518.025159] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d229367-84da-421c-880e-5083cd8c94a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.050747] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774059, 'name': RemoveSnapshot_Task} progress is 26%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.051067] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1518.051067] env[63371]: value = "task-1774061" [ 1518.051067] env[63371]: _type = "Task" [ 1518.051067] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.057397] env[63371]: DEBUG nova.network.neutron [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Successfully created port: d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1518.067326] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774061, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.147632] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774056, 'name': CreateVM_Task, 'duration_secs': 0.601064} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.150243] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1518.151256] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.151911] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.151911] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1518.152230] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98d41a33-31fb-4473-82e6-bcb843fa8fb2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.157019] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1518.157019] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52725138-497f-54eb-aac3-374b9acbd605" [ 1518.157019] env[63371]: _type = "Task" [ 1518.157019] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.169268] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52725138-497f-54eb-aac3-374b9acbd605, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.179917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.180039] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.180195] env[63371]: DEBUG nova.network.neutron [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1518.260788] env[63371]: DEBUG nova.network.neutron [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updated VIF entry in instance network info cache for port a7788c55-6aa0-4056-b8d1-cff8ad8951f7. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1518.261203] env[63371]: DEBUG nova.network.neutron [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updating instance_info_cache with network_info: [{"id": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "address": "fa:16:3e:8d:c6:ca", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7788c55-6a", "ovs_interfaceid": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1518.352699] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2adc22-c58f-450a-9d24-8756e102a018 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.367061] env[63371]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7bb6977-eb38-4c67-a01a-ef18bc47ebee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.371960] env[63371]: DEBUG oslo_vmware.api [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1774060, 'name': PowerOffVM_Task, 'duration_secs': 0.199667} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.371960] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1518.371960] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1518.373780] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85eec6ad-ea9e-41a3-835c-c18958936c89 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.402522] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e98c15d7-0938-4340-b478-3450e72bdf66 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.410399] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702a0e49-9281-4c37-84c0-b2bd36dbf554 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.427803] env[63371]: DEBUG nova.compute.provider_tree [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1518.498346] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1518.498566] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1518.498818] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Deleting the datastore file [datastore1] 
76c861a7-30f2-40f4-b723-7912975f36f8 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1518.499105] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7e292f5-e078-491f-803c-c9398f1afaf8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.506194] env[63371]: DEBUG oslo_vmware.api [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for the task: (returnval){ [ 1518.506194] env[63371]: value = "task-1774063" [ 1518.506194] env[63371]: _type = "Task" [ 1518.506194] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.517581] env[63371]: DEBUG oslo_vmware.api [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1774063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.520484] env[63371]: DEBUG oslo_vmware.api [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774059, 'name': RemoveSnapshot_Task, 'duration_secs': 0.632125} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.520745] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1518.520980] env[63371]: INFO nova.compute.manager [None req-3d4b3550-a242-41bb-80a6-750316091bb9 tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Took 16.80 seconds to snapshot the instance on the hypervisor. [ 1518.562317] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774061, 'name': ReconfigVM_Task, 'duration_secs': 0.302614} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.562317] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec/88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1518.562317] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-613cc7bc-2b4f-4ec5-b383-a9c4fa0681f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.573316] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1518.573316] env[63371]: value = "task-1774064" [ 1518.573316] env[63371]: _type = "Task" [ 1518.573316] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.583446] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774064, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.672251] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52725138-497f-54eb-aac3-374b9acbd605, 'name': SearchDatastore_Task, 'duration_secs': 0.010766} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1518.672568] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.672806] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1518.673053] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1518.673186] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1518.673387] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1518.673656] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f0586cd-3397-4768-9e64-077b703688d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.685470] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1518.685651] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1518.686506] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edbed72f-0bf9-4d01-bc5d-8fd551f537c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.695663] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1518.695663] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b8adee-5dc8-b8f8-a4ae-445c69767fd2" [ 1518.695663] env[63371]: _type = "Task" [ 1518.695663] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1518.704678] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b8adee-5dc8-b8f8-a4ae-445c69767fd2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1518.754793] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1518.764751] env[63371]: DEBUG oslo_concurrency.lockutils [req-291eaa46-9902-4a2f-9b8f-0fbba38b78e7 req-6a8aa195-1256-441d-9456-ee2073a73ad5 service nova] Releasing lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1518.789644] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1518.789894] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1518.790227] env[63371]: DEBUG nova.virt.hardware [None 
req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1518.790227] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1518.790472] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1518.790513] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1518.791077] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1518.791077] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1518.791077] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1518.791234] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1518.791336] env[63371]: DEBUG nova.virt.hardware [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1518.792249] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8daf980-a102-48ad-8c78-aa1945679746 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.796203] env[63371]: DEBUG nova.network.neutron [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 
195de525-1081-4db6-acf3-04a6d3eb142f] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1518.802491] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3678dabe-8942-4a1f-8288-ab965e5b8e03 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1518.931016] env[63371]: DEBUG nova.scheduler.client.report [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1519.017023] env[63371]: DEBUG oslo_vmware.api [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Task: {'id': task-1774063, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144332} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.017023] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1519.017023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1519.017208] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1519.017375] env[63371]: INFO nova.compute.manager [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1519.017613] env[63371]: DEBUG oslo.service.loopingcall [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1519.018051] env[63371]: DEBUG nova.compute.manager [-] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1519.018051] env[63371]: DEBUG nova.network.neutron [-] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1519.084709] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774064, 'name': Rename_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.205607] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b8adee-5dc8-b8f8-a4ae-445c69767fd2, 'name': SearchDatastore_Task, 'duration_secs': 0.0102} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.206421] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a7ec3e72-2e1b-4f5c-b6af-78633825f772 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.212570] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1519.212570] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5290dbae-e89a-dca0-6abf-856afe6b3e0f" [ 1519.212570] env[63371]: _type = "Task" [ 1519.212570] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.221019] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5290dbae-e89a-dca0-6abf-856afe6b3e0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.435611] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.716s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.436173] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1519.439617] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.286s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.441162] env[63371]: INFO nova.compute.claims [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1519.471057] env[63371]: DEBUG nova.network.neutron [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Updating instance_info_cache with network_info: [{"id": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "address": "fa:16:3e:33:87:bc", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e9291f7-15", "ovs_interfaceid": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1519.588993] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774064, 'name': Rename_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.724069] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5290dbae-e89a-dca0-6abf-856afe6b3e0f, 'name': SearchDatastore_Task, 'duration_secs': 0.036504} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1519.724352] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.724614] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 6df9af10-0053-4696-920a-10ab2af67ef5/6df9af10-0053-4696-920a-10ab2af67ef5.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1519.724991] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0d0ad59-84b9-4360-8924-4a3d15be9337 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1519.731586] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1519.731586] env[63371]: value = "task-1774066" [ 1519.731586] env[63371]: _type = "Task" [ 1519.731586] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1519.741164] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774066, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1519.860828] env[63371]: DEBUG nova.compute.manager [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Received event network-vif-plugged-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1519.861281] env[63371]: DEBUG oslo_concurrency.lockutils [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] Acquiring lock "195de525-1081-4db6-acf3-04a6d3eb142f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1519.861513] env[63371]: DEBUG oslo_concurrency.lockutils [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] Lock "195de525-1081-4db6-acf3-04a6d3eb142f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1519.861655] env[63371]: DEBUG oslo_concurrency.lockutils [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] Lock "195de525-1081-4db6-acf3-04a6d3eb142f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1519.861852] env[63371]: DEBUG nova.compute.manager [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] No waiting events found dispatching network-vif-plugged-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1519.862216] env[63371]: WARNING nova.compute.manager [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Received unexpected event network-vif-plugged-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 for instance with vm_state building and task_state spawning. [ 1519.862457] env[63371]: DEBUG nova.compute.manager [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Received event network-changed-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1519.864132] env[63371]: DEBUG nova.compute.manager [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Refreshing instance network info cache due to event network-changed-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1519.864132] env[63371]: DEBUG oslo_concurrency.lockutils [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] Acquiring lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1519.956160] env[63371]: DEBUG nova.compute.utils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1519.961189] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1519.961370] env[63371]: DEBUG nova.network.neutron [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1519.974411] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Releasing lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1519.974788] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Instance network_info: |[{"id": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "address": "fa:16:3e:33:87:bc", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e9291f7-15", "ovs_interfaceid": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1519.975164] env[63371]: DEBUG oslo_concurrency.lockutils [req-68682bdd-253d-41e3-be6c-59e5b09fb06f 
req-c829b780-5519-4514-946e-af64c51e6968 service nova] Acquired lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1519.975384] env[63371]: DEBUG nova.network.neutron [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Refreshing network info cache for port 8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1519.979273] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:87:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9d50784-eb90-48ae-a4ea-2125c52a50d7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8e9291f7-154c-4bfa-bfd8-f09dbd9b4963', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1519.986945] env[63371]: DEBUG oslo.service.loopingcall [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1519.989401] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1519.989943] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67930fd7-7b24-4931-aefe-b0c7434f2986 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.013836] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1520.013836] env[63371]: value = "task-1774067" [ 1520.013836] env[63371]: _type = "Task" [ 1520.013836] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.024220] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774067, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.088875] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774064, 'name': Rename_Task, 'duration_secs': 1.162097} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.088875] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1520.088875] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd935257-d079-47a3-922c-3b5944ad1090 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.099226] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1520.099226] env[63371]: value = "task-1774068" [ 1520.099226] env[63371]: _type = "Task" [ 1520.099226] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.109013] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774068, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.160065] env[63371]: DEBUG nova.policy [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31b76ca90f31495287b332ebb3001dff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e96348bcfea1455dad72945c7c36f027', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1520.242490] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774066, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504689} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.242764] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 6df9af10-0053-4696-920a-10ab2af67ef5/6df9af10-0053-4696-920a-10ab2af67ef5.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1520.242979] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1520.243257] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b727f5da-8fdc-4869-8af2-c8117100d9db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.249785] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1520.249785] env[63371]: value = "task-1774069" [ 1520.249785] env[63371]: _type = "Task" [ 1520.249785] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.259051] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774069, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.460588] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1520.534503] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774067, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.614409] env[63371]: DEBUG oslo_vmware.api [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774068, 'name': PowerOnVM_Task, 'duration_secs': 0.495526} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.614409] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1520.614773] env[63371]: INFO nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Took 9.99 seconds to spawn the instance on the hypervisor. [ 1520.614773] env[63371]: DEBUG nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1520.615621] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3e3a66-00a1-426f-a3c3-f60d52d794db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.673702] env[63371]: DEBUG nova.network.neutron [-] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.766884] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774069, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079304} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1520.766884] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1520.766884] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92e6e67-3215-4284-85d8-065fcf48d712 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.814871] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 6df9af10-0053-4696-920a-10ab2af67ef5/6df9af10-0053-4696-920a-10ab2af67ef5.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1520.819943] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c548ee09-bb06-4957-853e-60faff2abed3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1520.844389] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1520.844389] env[63371]: value = "task-1774070" [ 1520.844389] env[63371]: _type = "Task" [ 1520.844389] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1520.854392] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774070, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1520.877714] env[63371]: DEBUG nova.network.neutron [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Updated VIF entry in instance network info cache for port 8e9291f7-154c-4bfa-bfd8-f09dbd9b4963. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1520.877714] env[63371]: DEBUG nova.network.neutron [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Updating instance_info_cache with network_info: [{"id": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "address": "fa:16:3e:33:87:bc", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e9291f7-15", "ovs_interfaceid": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1520.954647] env[63371]: DEBUG nova.network.neutron [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Successfully updated port: d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1521.028952] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774067, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.063770] env[63371]: DEBUG nova.network.neutron [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Successfully created port: 12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1521.107320] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c739f742-7ea6-4c9e-a673-1471c1d2c662 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.115872] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce5b84e-9187-4215-af99-c2a57116618f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.159762] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cf9a10-2a27-49e1-9b81-2aeb4fbb27d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.164931] env[63371]: INFO nova.compute.manager [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Took 51.64 seconds to build instance. [ 1521.171651] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f28beca9-b564-4f97-8d41-0e6450c9776e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.176148] env[63371]: INFO nova.compute.manager [-] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Took 2.16 seconds to deallocate network for instance. [ 1521.193134] env[63371]: DEBUG nova.compute.provider_tree [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1521.359963] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774070, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.387754] env[63371]: DEBUG oslo_concurrency.lockutils [req-68682bdd-253d-41e3-be6c-59e5b09fb06f req-c829b780-5519-4514-946e-af64c51e6968 service nova] Releasing lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1521.397080] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.397503] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.397806] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.398093] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1521.398342] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.401421] env[63371]: INFO nova.compute.manager [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Terminating instance [ 1521.405029] env[63371]: DEBUG nova.compute.manager [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1521.405029] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1521.406196] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2075d5db-e673-42e5-a96c-a3c75b512bd5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.416969] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1521.416969] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d6b0a57-6030-4f8c-ad85-7feb2ce6f276 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.425072] env[63371]: DEBUG oslo_vmware.api [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1521.425072] env[63371]: value = "task-1774071" [ 1521.425072] env[63371]: _type = "Task" [ 1521.425072] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.442570] env[63371]: DEBUG oslo_vmware.api [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774071, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.460085] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "refresh_cache-0e2c8ced-198f-43be-9d41-703a7c590df4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.460085] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquired lock "refresh_cache-0e2c8ced-198f-43be-9d41-703a7c590df4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.460085] env[63371]: DEBUG nova.network.neutron [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1521.480919] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1521.508663] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1521.508938] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1521.509475] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1521.509475] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor pref 0:0:0 
{{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1521.509475] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1521.509590] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1521.509779] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1521.510119] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1521.510119] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1521.510763] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1521.510763] env[63371]: DEBUG nova.virt.hardware [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1521.513392] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b384bb00-31e5-4386-82f2-ec6443952f7f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.526335] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d516d38-bef8-4bac-a063-6cddd0d909c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.536080] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774067, 'name': CreateVM_Task, 'duration_secs': 1.420332} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.544475] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1521.546882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1521.546882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1521.546882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1521.547133] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67ce76b8-3568-49d7-856c-6e01bcc20a20 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.552552] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1521.552552] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52abaf19-e2b7-714c-ce8c-abb323edf1f4" [ 1521.552552] env[63371]: _type = "Task" [ 1521.552552] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.563369] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52abaf19-e2b7-714c-ce8c-abb323edf1f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.667488] env[63371]: DEBUG oslo_concurrency.lockutils [None req-68b89a50-c9dd-4348-83e0-b5d393dc4218 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.337s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1521.683212] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1521.697926] env[63371]: DEBUG nova.scheduler.client.report [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1521.858460] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774070, 'name': ReconfigVM_Task, 'duration_secs': 0.852407} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.858460] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 6df9af10-0053-4696-920a-10ab2af67ef5/6df9af10-0053-4696-920a-10ab2af67ef5.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1521.858460] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08c733a4-aa6b-49b3-8dc0-2e564a4ac962 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1521.865137] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1521.865137] env[63371]: value = "task-1774073" [ 1521.865137] env[63371]: _type = "Task" [ 1521.865137] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1521.874567] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774073, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1521.935670] env[63371]: DEBUG oslo_vmware.api [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774071, 'name': PowerOffVM_Task, 'duration_secs': 0.310625} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1521.936541] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1521.936541] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1521.936541] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3eeec4e6-7f85-48a6-89c3-d2e4e80f02b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.041018] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1522.041136] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1522.041321] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Deleting the datastore file [datastore1] aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1522.041588] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f78de94-7df4-4253-afec-80429038fcc4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.049675] env[63371]: DEBUG oslo_vmware.api [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for the task: (returnval){ [ 1522.049675] env[63371]: value = "task-1774075" [ 1522.049675] 
env[63371]: _type = "Task" [ 1522.049675] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.068502] env[63371]: DEBUG oslo_vmware.api [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774075, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.072883] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52abaf19-e2b7-714c-ce8c-abb323edf1f4, 'name': SearchDatastore_Task, 'duration_secs': 0.012423} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.072883] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1522.072883] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1522.073081] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.073081] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.073253] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1522.073520] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-df54ca74-0446-42e3-b32a-e26df546e42c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.082584] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b 
tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1522.082801] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1522.083624] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22bc23ce-15dd-4bce-adb1-6c350866e24a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.089320] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1522.089320] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52238688-4844-9fd0-36d9-404110efa9b4" [ 1522.089320] env[63371]: _type = "Task" [ 1522.089320] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.097030] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52238688-4844-9fd0-36d9-404110efa9b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.173464] env[63371]: DEBUG nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1522.203178] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.763s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.203705] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1522.206957] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.543s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.207159] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.209318] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.143s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.211687] env[63371]: INFO nova.compute.claims [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1522.251345] env[63371]: INFO nova.scheduler.client.report [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Deleted allocations for instance 7e66011a-4fed-471f-82ea-e1016f92ad39 [ 1522.252976] env[63371]: DEBUG nova.network.neutron [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1522.376921] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.376921] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.382164] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774073, 'name': Rename_Task, 'duration_secs': 0.163363} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.384750] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1522.384750] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ec088847-1225-4703-a9c4-6259811ec981 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.390953] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1522.390953] env[63371]: value = "task-1774076" [ 1522.390953] env[63371]: _type = "Task" [ 1522.390953] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.400164] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774076, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.560018] env[63371]: DEBUG oslo_vmware.api [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Task: {'id': task-1774075, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142375} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.560412] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1522.560942] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1522.561341] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1522.561514] env[63371]: INFO nova.compute.manager [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1522.562026] env[63371]: DEBUG oslo.service.loopingcall [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1522.562354] env[63371]: DEBUG nova.compute.manager [-] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1522.562525] env[63371]: DEBUG nova.network.neutron [-] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1522.604856] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52238688-4844-9fd0-36d9-404110efa9b4, 'name': SearchDatastore_Task, 'duration_secs': 0.00862} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.606456] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2010ad8c-aec0-46af-8e79-6ec302e45b04 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1522.611913] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1522.611913] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fa4eab-22b2-f18b-9152-8c331619f528" [ 1522.611913] env[63371]: _type = "Task" [ 1522.611913] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1522.622388] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fa4eab-22b2-f18b-9152-8c331619f528, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1522.683195] env[63371]: DEBUG nova.network.neutron [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Updating instance_info_cache with network_info: [{"id": "d11a5154-6b30-4190-925a-4a07bc31709e", "address": "fa:16:3e:c2:c6:c2", "network": {"id": "ab1c6e5a-670b-45d7-8afa-d89b7e38f1aa", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2086629518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fc50868ddcf4193beb9b3a8a37f97b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd11a5154-6b", "ovs_interfaceid": "d11a5154-6b30-4190-925a-4a07bc31709e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1522.702307] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.716509] env[63371]: DEBUG nova.compute.utils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 
tempest-AttachVolumeNegativeTest-1836472214-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1522.720053] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1522.720209] env[63371]: DEBUG nova.network.neutron [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1522.764315] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8daf31ad-64d4-4e34-8fa0-8e60559a2234 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.153s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.765874] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 31.501s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.766185] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.766970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.766970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.772646] env[63371]: INFO nova.compute.manager [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 
7e66011a-4fed-471f-82ea-e1016f92ad39] Terminating instance [ 1522.776781] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.776781] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquired lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1522.776781] env[63371]: DEBUG nova.network.neutron [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1522.813921] env[63371]: DEBUG nova.compute.manager [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Received event network-vif-deleted-3c5c963f-1c9c-4d03-bb01-5670b9fe06b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1522.819177] env[63371]: DEBUG nova.compute.manager [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Received event network-vif-plugged-d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1522.819177] env[63371]: DEBUG oslo_concurrency.lockutils [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] Acquiring lock "0e2c8ced-198f-43be-9d41-703a7c590df4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1522.819177] env[63371]: DEBUG oslo_concurrency.lockutils [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1522.819177] env[63371]: DEBUG oslo_concurrency.lockutils [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1522.819177] env[63371]: DEBUG nova.compute.manager [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] No waiting events found dispatching network-vif-plugged-d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
1522.819177] env[63371]: WARNING nova.compute.manager [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Received unexpected event network-vif-plugged-d11a5154-6b30-4190-925a-4a07bc31709e for instance with vm_state building and task_state spawning. [ 1522.819177] env[63371]: DEBUG nova.compute.manager [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Received event network-changed-d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1522.819177] env[63371]: DEBUG nova.compute.manager [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Refreshing instance network info cache due to event network-changed-d11a5154-6b30-4190-925a-4a07bc31709e. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1522.819177] env[63371]: DEBUG oslo_concurrency.lockutils [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] Acquiring lock "refresh_cache-0e2c8ced-198f-43be-9d41-703a7c590df4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1522.855084] env[63371]: DEBUG nova.policy [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4302f381e0948438b9ee23a33a0f982', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35882164a8734563a006675f2ec6ba71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1522.907591] env[63371]: DEBUG oslo_vmware.api [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774076, 'name': PowerOnVM_Task, 'duration_secs': 0.489575} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1522.907937] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1522.908199] env[63371]: INFO nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Took 9.62 seconds to spawn the instance on the hypervisor. 
[ 1522.908416] env[63371]: DEBUG nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1522.909249] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1371d54e-9e43-4c3d-9d0e-3431bb1dce9d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.123975] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fa4eab-22b2-f18b-9152-8c331619f528, 'name': SearchDatastore_Task, 'duration_secs': 0.010501} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1523.124335] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.124541] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 195de525-1081-4db6-acf3-04a6d3eb142f/195de525-1081-4db6-acf3-04a6d3eb142f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1523.125042] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b378abff-2ae8-472c-b7dd-5d84ff83580c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.134148] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1523.134148] env[63371]: value = "task-1774077" [ 1523.134148] env[63371]: _type = "Task" [ 1523.134148] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.143470] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774077, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.189848] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Releasing lock "refresh_cache-0e2c8ced-198f-43be-9d41-703a7c590df4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.189848] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Instance network_info: |[{"id": "d11a5154-6b30-4190-925a-4a07bc31709e", "address": "fa:16:3e:c2:c6:c2", "network": {"id": "ab1c6e5a-670b-45d7-8afa-d89b7e38f1aa", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2086629518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fc50868ddcf4193beb9b3a8a37f97b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd11a5154-6b", "ovs_interfaceid": "d11a5154-6b30-4190-925a-4a07bc31709e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1523.191075] env[63371]: DEBUG oslo_concurrency.lockutils [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] Acquired lock "refresh_cache-0e2c8ced-198f-43be-9d41-703a7c590df4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.191155] env[63371]: DEBUG nova.network.neutron [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Refreshing network info cache for port d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1523.192737] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:c6:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '030ecc21-dc1c-4283-854e-88e623b3970a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd11a5154-6b30-4190-925a-4a07bc31709e', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1523.202889] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 
tempest-ServerAddressesTestJSON-1509524884-project-member] Creating folder: Project (2fc50868ddcf4193beb9b3a8a37f97b4). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1523.206123] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d86c0b5-bafb-43b3-a6b5-ad9f992528fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.219767] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Created folder: Project (2fc50868ddcf4193beb9b3a8a37f97b4) in parent group-v368199. [ 1523.219767] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Creating folder: Instances. Parent ref: group-v368351. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1523.219767] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-036668bb-4a82-499d-8186-ff273da7c16f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.228770] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1523.233585] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Created folder: Instances in parent group-v368351. [ 1523.233841] env[63371]: DEBUG oslo.service.loopingcall [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1523.234324] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1523.234570] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a98780fe-41e5-4240-ba30-87a74556503c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.256490] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1523.256490] env[63371]: value = "task-1774080" [ 1523.256490] env[63371]: _type = "Task" [ 1523.256490] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1523.269078] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774080, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.283200] env[63371]: DEBUG nova.compute.utils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Can not refresh info_cache because instance was not found {{(pid=63371) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1523.313966] env[63371]: DEBUG nova.network.neutron [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1523.342549] env[63371]: DEBUG nova.network.neutron [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Successfully updated port: 12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1523.438187] env[63371]: INFO nova.compute.manager [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Took 52.69 seconds to build instance. [ 1523.456397] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "d00602b9-16bf-4c11-bc47-6076dddbf159" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.456704] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "d00602b9-16bf-4c11-bc47-6076dddbf159" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.465534] env[63371]: DEBUG nova.network.neutron [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1523.644415] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774077, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.774152] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774080, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1523.778524] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49f157a-8a88-402e-be13-ed2f40129b28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.786265] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9eb6ed-c9ee-4fda-8af2-1e5683d87087 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.822794] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cba8757-cdcf-4cc0-ae9b-006d480f02df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.826352] env[63371]: DEBUG nova.compute.manager [req-eee8c2b5-abcc-40df-a7fd-1a4ba2713935 req-ec5a7e30-2012-4de1-8b76-bece79b4e2a5 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Received event network-vif-plugged-12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1523.826615] env[63371]: DEBUG oslo_concurrency.lockutils [req-eee8c2b5-abcc-40df-a7fd-1a4ba2713935 req-ec5a7e30-2012-4de1-8b76-bece79b4e2a5 service nova] Acquiring lock "574121c4-c721-4d30-81ec-3f2310a7b6d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1523.826867] env[63371]: DEBUG oslo_concurrency.lockutils [req-eee8c2b5-abcc-40df-a7fd-1a4ba2713935 req-ec5a7e30-2012-4de1-8b76-bece79b4e2a5 service nova] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1523.827339] env[63371]: DEBUG oslo_concurrency.lockutils [req-eee8c2b5-abcc-40df-a7fd-1a4ba2713935 req-ec5a7e30-2012-4de1-8b76-bece79b4e2a5 service nova] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.827339] env[63371]: DEBUG nova.compute.manager [req-eee8c2b5-abcc-40df-a7fd-1a4ba2713935 req-ec5a7e30-2012-4de1-8b76-bece79b4e2a5 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] No waiting events found dispatching network-vif-plugged-12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1523.827561] env[63371]: WARNING nova.compute.manager [req-eee8c2b5-abcc-40df-a7fd-1a4ba2713935 req-ec5a7e30-2012-4de1-8b76-bece79b4e2a5 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Received unexpected event network-vif-plugged-12bfc72d-5ca7-4f11-8259-77887b5af47c for instance with vm_state building and task_state spawning. 
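(Editor's note: the "Acquiring lock ... / acquired ... waited / released ... held" DEBUG triplets above, e.g. for the "574121c4-...-events" lock, are the standard output of oslo.concurrency's lock helpers. The following is an illustrative sketch only, not Nova's actual code path; the function name and the way the lock name is passed are assumptions made for the example.)

# Sketch: reproducing the lock-lifecycle DEBUG lines seen in this log with
# oslo.concurrency. The lock name is the one that appears above; the handler
# function is hypothetical.
from oslo_concurrency import lockutils

EVENT_LOCK = "574121c4-c721-4d30-81ec-3f2310a7b6d1-events"  # per-instance event lock name from the log

@lockutils.synchronized(EVENT_LOCK)
def handle_vif_plugged(port_id):
    # Runs with the lock held; the decorator logs "Acquiring lock ...",
    # "Lock ... acquired ... :: waited Xs" and "Lock ... released ... :: held Xs".
    print(f"dispatching network-vif-plugged-{port_id}")

handle_vif_plugged("12bfc72d-5ca7-4f11-8259-77887b5af47c")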
[ 1523.835828] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5fd9f87-a56e-4d40-adb6-e66d9e5d8884 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.851062] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "refresh_cache-574121c4-c721-4d30-81ec-3f2310a7b6d1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1523.851062] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "refresh_cache-574121c4-c721-4d30-81ec-3f2310a7b6d1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1523.851319] env[63371]: DEBUG nova.network.neutron [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1523.853747] env[63371]: DEBUG nova.compute.provider_tree [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1523.942193] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64054b29-d1b1-456b-9c08-eaf4f0cc758a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "6df9af10-0053-4696-920a-10ab2af67ef5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.642s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1523.971441] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Releasing lock "refresh_cache-7e66011a-4fed-471f-82ea-e1016f92ad39" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1523.971891] env[63371]: DEBUG nova.compute.manager [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1523.972105] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1523.972389] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f8d1a6d-4e97-4f60-9a6c-0734f7719d3f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1523.982707] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5facb5c-30ad-48ad-89cf-6dc68d54dddf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.001371] env[63371]: DEBUG nova.network.neutron [-] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.018528] env[63371]: WARNING nova.virt.vmwareapi.vmops [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7e66011a-4fed-471f-82ea-e1016f92ad39 could not be found. [ 1524.018528] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1524.018746] env[63371]: INFO nova.compute.manager [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1524.019020] env[63371]: DEBUG oslo.service.loopingcall [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1524.019992] env[63371]: DEBUG nova.compute.manager [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1524.020113] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1524.036712] env[63371]: DEBUG nova.network.neutron [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Updated VIF entry in instance network info cache for port d11a5154-6b30-4190-925a-4a07bc31709e. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1524.037092] env[63371]: DEBUG nova.network.neutron [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Updating instance_info_cache with network_info: [{"id": "d11a5154-6b30-4190-925a-4a07bc31709e", "address": "fa:16:3e:c2:c6:c2", "network": {"id": "ab1c6e5a-670b-45d7-8afa-d89b7e38f1aa", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-2086629518-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2fc50868ddcf4193beb9b3a8a37f97b4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "030ecc21-dc1c-4283-854e-88e623b3970a", "external-id": "nsx-vlan-transportzone-577", "segmentation_id": 577, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd11a5154-6b", "ovs_interfaceid": "d11a5154-6b30-4190-925a-4a07bc31709e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.048990] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1524.073037] env[63371]: DEBUG nova.network.neutron [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Successfully created port: cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1524.145586] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774077, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525468} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.146066] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 195de525-1081-4db6-acf3-04a6d3eb142f/195de525-1081-4db6-acf3-04a6d3eb142f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1524.146408] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1524.146739] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a865f51e-d0a6-402b-bd7f-9690b37f63fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.160035] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1524.160035] env[63371]: value = "task-1774081" [ 1524.160035] env[63371]: _type = "Task" [ 1524.160035] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.167419] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774081, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.241147] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1524.272499] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1524.272641] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1524.272860] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1524.272971] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1524.273378] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1524.273617] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1524.275048] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1524.276090] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1524.276274] 
env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1524.276503] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1524.276599] env[63371]: DEBUG nova.virt.hardware [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1524.278115] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13aefdbf-427e-40e6-8d1f-63fe738fec94 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.287049] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774080, 'name': CreateVM_Task, 'duration_secs': 0.624759} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.287748] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1524.288394] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1524.289910] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.289910] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1524.289910] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8fb7e2ee-0ac1-4350-b814-8230dd40142d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.295911] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4c6da28-53ff-4df4-8a68-35228eb15875 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1524.301111] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1524.301111] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524fd62d-cf4c-a7cb-1c02-2c3ab4f60194" [ 1524.301111] env[63371]: _type = "Task" [ 1524.301111] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.317536] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524fd62d-cf4c-a7cb-1c02-2c3ab4f60194, 'name': SearchDatastore_Task, 'duration_secs': 0.009952} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.317839] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.318082] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1524.318568] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1524.318720] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1524.318933] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1524.319154] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b04a66f2-9484-4f42-9b56-8d895d25b7cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.327913] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Created directory 
with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1524.328136] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1524.328877] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0da821c-f0cd-4222-ab73-9008f7db5921 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.334511] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1524.334511] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b795ab-be23-ba3e-0fb1-e3e4abe2f683" [ 1524.334511] env[63371]: _type = "Task" [ 1524.334511] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.342602] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b795ab-be23-ba3e-0fb1-e3e4abe2f683, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.358047] env[63371]: DEBUG nova.scheduler.client.report [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1524.445091] env[63371]: DEBUG nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1524.454292] env[63371]: DEBUG nova.network.neutron [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1524.506514] env[63371]: INFO nova.compute.manager [-] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Took 1.94 seconds to deallocate network for instance. 
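(Editor's note: the repeated "Invoking ..._Task", "Waiting for the task", "progress is N%" and "completed successfully ... duration_secs" records above all follow oslo.vmware's invoke-then-poll pattern. Below is a minimal sketch of that pattern under assumed connection details and placeholder datastore paths; it is not the exact Nova call site.)

# Sketch: issue a vCenter task and poll it with oslo.vmware, the same cadence
# logged above for CopyVirtualDisk_Task / ExtendVirtualDisk_Task.
from oslo_vmware import api

session = api.VMwareAPISession(
    "vcenter.example.org", "user", "password",      # placeholder credentials
    api_retry_count=10, task_poll_interval=0.5)

vim = session.vim
task = session.invoke_api(
    vim, "CopyVirtualDisk_Task",
    vim.service_content.virtualDiskManager,
    sourceName="[datastore1] devstack-image-cache_base/IMAGE_ID/IMAGE_ID.vmdk",  # placeholder paths
    destName="[datastore1] INSTANCE_UUID/INSTANCE_UUID.vmdk")

# wait_for_task() polls the task, logging its progress until it reaches the
# "success" state (or raising on error) -- the "progress is N%" lines above.
session.wait_for_task(task)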
[ 1524.539702] env[63371]: DEBUG oslo_concurrency.lockutils [req-c819ae7c-8e37-4219-8926-608f9c73abe5 req-914cd761-102d-461a-8e66-33ae09399f19 service nova] Releasing lock "refresh_cache-0e2c8ced-198f-43be-9d41-703a7c590df4" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1524.554712] env[63371]: DEBUG nova.network.neutron [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.675811] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774081, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066983} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.676731] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1524.677774] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd48f122-32cf-4c57-801c-5129a9602f32 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.708564] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 195de525-1081-4db6-acf3-04a6d3eb142f/195de525-1081-4db6-acf3-04a6d3eb142f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1524.712532] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30818503-ddbd-4335-9c71-063eb1eed572 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.731539] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1524.731539] env[63371]: value = "task-1774082" [ 1524.731539] env[63371]: _type = "Task" [ 1524.731539] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.742308] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774082, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.846548] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b795ab-be23-ba3e-0fb1-e3e4abe2f683, 'name': SearchDatastore_Task, 'duration_secs': 0.010141} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1524.847395] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d3748db-5ffa-4579-a29b-e46e97d3bc00 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1524.853322] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1524.853322] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c883f1-9f8c-543b-639d-4d1b11b0ce05" [ 1524.853322] env[63371]: _type = "Task" [ 1524.853322] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1524.863534] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1524.864099] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1524.866772] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c883f1-9f8c-543b-639d-4d1b11b0ce05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1524.866968] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.615s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1524.868468] env[63371]: INFO nova.compute.claims [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1524.907358] env[63371]: DEBUG nova.network.neutron [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Updating instance_info_cache with network_info: [{"id": "12bfc72d-5ca7-4f11-8259-77887b5af47c", "address": "fa:16:3e:9e:61:49", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12bfc72d-5c", "ovs_interfaceid": "12bfc72d-5ca7-4f11-8259-77887b5af47c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1524.973164] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.013658] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.057374] env[63371]: INFO nova.compute.manager [-] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Took 1.04 seconds to deallocate network for instance. 
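(Editor's note: the long "Updating instance_info_cache with network_info: [...]" records above are a list of VIF dictionaries. The snippet below trims the entry for port 12bfc72d-5ca7-4f11-8259-77887b5af47c, with values copied from the log, to the fields most often needed when reading these records by hand.)

# The Neutron VIF entry from the cache update above, reduced to key fields.
vif = {
    "id": "12bfc72d-5ca7-4f11-8259-77887b5af47c",
    "address": "fa:16:3e:9e:61:49",
    "devname": "tap12bfc72d-5c",
    "network": {
        "id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4}],
        }],
        "meta": {"mtu": 8950},
    },
    "details": {"nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967",
                "segmentation_id": 454},
}

fixed_ip = vif["network"]["subnets"][0]["ips"][0]["address"]   # '192.168.128.12'
print(vif["id"], vif["address"], fixed_ip, vif["details"]["segmentation_id"])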
[ 1525.243478] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774082, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.347783] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquiring lock "1cb18f2a-6476-4492-8576-7b0fd693a107" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1525.348105] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Lock "1cb18f2a-6476-4492-8576-7b0fd693a107" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1525.365244] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c883f1-9f8c-543b-639d-4d1b11b0ce05, 'name': SearchDatastore_Task, 'duration_secs': 0.009582} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.365504] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1525.365688] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0e2c8ced-198f-43be-9d41-703a7c590df4/0e2c8ced-198f-43be-9d41-703a7c590df4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1525.366162] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4f95acbd-40bc-4618-a625-0f9e9a0a35a1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.373310] env[63371]: DEBUG nova.compute.utils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1525.378475] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 
tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1525.379236] env[63371]: DEBUG nova.network.neutron [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1525.384472] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1525.384472] env[63371]: value = "task-1774083" [ 1525.384472] env[63371]: _type = "Task" [ 1525.384472] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.399937] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774083, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.410896] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "refresh_cache-574121c4-c721-4d30-81ec-3f2310a7b6d1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1525.411348] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Instance network_info: |[{"id": "12bfc72d-5ca7-4f11-8259-77887b5af47c", "address": "fa:16:3e:9e:61:49", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12bfc72d-5c", "ovs_interfaceid": "12bfc72d-5ca7-4f11-8259-77887b5af47c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1525.411844] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 
tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:61:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12bfc72d-5ca7-4f11-8259-77887b5af47c', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1525.422948] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Creating folder: Project (e96348bcfea1455dad72945c7c36f027). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1525.423660] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1bd9d7d4-ab40-4ba4-ab0e-bca205863494 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.435054] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Created folder: Project (e96348bcfea1455dad72945c7c36f027) in parent group-v368199. [ 1525.435054] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Creating folder: Instances. Parent ref: group-v368354. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1525.435274] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-763dd160-285b-4570-91f9-cb73bff4ae16 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.444442] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Created folder: Instances in parent group-v368354. [ 1525.444704] env[63371]: DEBUG oslo.service.loopingcall [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1525.444878] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1525.445122] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4441799-e5d9-499a-81df-fe41fe585aa8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.469786] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1525.469786] env[63371]: value = "task-1774086" [ 1525.469786] env[63371]: _type = "Task" [ 1525.469786] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.478502] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774086, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.499360] env[63371]: DEBUG nova.policy [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38c65e6dd9e4468fb1a0235bac086151', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4ca8a73414142d497ebd3d3f043d9ae', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1525.572832] env[63371]: INFO nova.compute.manager [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance disappeared during terminate [ 1525.572832] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1c1a9bb5-f7fa-42ba-b2b2-89f964c172be tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "7e66011a-4fed-471f-82ea-e1016f92ad39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.807s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1525.743569] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774082, 'name': ReconfigVM_Task, 'duration_secs': 0.735556} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.743889] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 195de525-1081-4db6-acf3-04a6d3eb142f/195de525-1081-4db6-acf3-04a6d3eb142f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1525.744105] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=63371) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1525.744781] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-7714a2b3-c16d-40a8-b036-5afff60ce867 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1525.751597] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1525.751597] env[63371]: value = "task-1774087" [ 1525.751597] env[63371]: _type = "Task" [ 1525.751597] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1525.760510] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774087, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.885606] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1525.908123] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774083, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1525.993612] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774086, 'name': CreateVM_Task, 'duration_secs': 0.488904} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1525.993612] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1525.993612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1525.993612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1525.993612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1525.993612] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-717b7566-e076-4ca3-a11a-0d6da9fb71cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.005627] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1526.005627] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5234c555-72c2-fc86-3898-80e04d1ed481" [ 1526.005627] env[63371]: _type = "Task" [ 1526.005627] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.013286] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5234c555-72c2-fc86-3898-80e04d1ed481, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.036641] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Acquiring lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.036770] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.053806] env[63371]: DEBUG nova.compute.manager [req-d3ce9dd6-c364-4797-8928-a2d7464ef97d req-1265df32-c65a-4048-818a-1c9b33fa6340 service nova] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Received event network-vif-deleted-5f6d168b-1bd3-4bdd-9693-ee62c25e8666 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1526.269559] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774087, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.084752} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.270224] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=63371) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1526.273674] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce131ec2-d41f-43f3-a075-1cecc6fc134d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.301450] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Reconfiguring VM instance instance-00000034 to attach disk [datastore1] 195de525-1081-4db6-acf3-04a6d3eb142f/ephemeral_0.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1526.304257] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78935c75-fe97-4c63-857b-630481f6b279 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.323703] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1526.323703] env[63371]: value = 
"task-1774088" [ 1526.323703] env[63371]: _type = "Task" [ 1526.323703] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.336578] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774088, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.414774] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774083, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.678666} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.417425] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0e2c8ced-198f-43be-9d41-703a7c590df4/0e2c8ced-198f-43be-9d41-703a7c590df4.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1526.417642] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1526.419167] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2cedc21d-2c09-47c1-966a-7bd1f2aebbf2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.430430] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1526.430430] env[63371]: value = "task-1774089" [ 1526.430430] env[63371]: _type = "Task" [ 1526.430430] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.441440] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774089, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.478460] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b304e862-a142-47c8-90a0-1c5b2d551c85 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.487501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a75620-29a2-4eae-8f7e-7b6bfa4e84fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.526405] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e29e32-ab77-41b2-afc7-591365c83702 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.532034] env[63371]: DEBUG nova.compute.manager [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Received event network-changed-12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1526.532119] env[63371]: DEBUG nova.compute.manager [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Refreshing instance network info cache due to event network-changed-12bfc72d-5ca7-4f11-8259-77887b5af47c. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1526.532514] env[63371]: DEBUG oslo_concurrency.lockutils [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] Acquiring lock "refresh_cache-574121c4-c721-4d30-81ec-3f2310a7b6d1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.532611] env[63371]: DEBUG oslo_concurrency.lockutils [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] Acquired lock "refresh_cache-574121c4-c721-4d30-81ec-3f2310a7b6d1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.532846] env[63371]: DEBUG nova.network.neutron [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Refreshing network info cache for port 12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1526.545864] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5234c555-72c2-fc86-3898-80e04d1ed481, 'name': SearchDatastore_Task, 'duration_secs': 0.04131} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.547934] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139f2796-98db-40c1-9de3-a398179f8c86 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.553026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1526.553026] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1526.553235] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1526.553378] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1526.553558] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1526.554301] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70a369d7-131f-4370-881d-8df994758e5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.568175] env[63371]: DEBUG nova.compute.provider_tree [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1526.570601] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1526.570801] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 
tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1526.572562] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef9b3e3f-d62f-4102-b871-1cae828de087 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.577938] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1526.577938] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52be4a31-235b-6af1-54a4-243b6fd0e2f2" [ 1526.577938] env[63371]: _type = "Task" [ 1526.577938] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.582373] env[63371]: DEBUG nova.network.neutron [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Successfully created port: f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1526.591246] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52be4a31-235b-6af1-54a4-243b6fd0e2f2, 'name': SearchDatastore_Task, 'duration_secs': 0.010459} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.592303] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-294a5607-2177-46ef-a95c-6d07cffe72aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.597501] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1526.597501] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d460c6-3c1c-b2e9-4a3b-f0044a6d8355" [ 1526.597501] env[63371]: _type = "Task" [ 1526.597501] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.606987] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d460c6-3c1c-b2e9-4a3b-f0044a6d8355, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.690917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.691274] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.691585] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1526.691736] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1526.691958] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1526.694413] env[63371]: INFO nova.compute.manager [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Terminating instance [ 1526.696469] env[63371]: DEBUG nova.compute.manager [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1526.696786] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1526.697600] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857bf924-9580-4a8a-bfb8-bcc8007f9319 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.705559] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1526.705884] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ea44f56-0802-46fa-840c-274f7ab8b527 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.711848] env[63371]: DEBUG oslo_vmware.api [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for the task: (returnval){ [ 1526.711848] env[63371]: value = "task-1774090" [ 1526.711848] env[63371]: _type = "Task" [ 1526.711848] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1526.722095] env[63371]: DEBUG oslo_vmware.api [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1774090, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.834043] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774088, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1526.911354] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1526.942782] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774089, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074694} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1526.946137] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1526.946475] env[63371]: DEBUG nova.virt.hardware [None 
req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1526.946475] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1526.946983] env[63371]: DEBUG nova.virt.hardware [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1526.946983] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1526.950107] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c448b552-cd38-4f87-bd99-c72c982b6e10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.954149] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d9b06a-6648-4217-ad33-1b4a2e98d08b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.975847] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f03744f9-1f57-4fee-9e96-328f446e3cc5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1526.989952] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 0e2c8ced-198f-43be-9d41-703a7c590df4/0e2c8ced-198f-43be-9d41-703a7c590df4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1526.990705] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-969d29c9-2541-42fb-b41f-d37fdd229a62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.007476] env[63371]: DEBUG nova.network.neutron [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Successfully updated port: cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1527.019953] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: 
(returnval){ [ 1527.019953] env[63371]: value = "task-1774091" [ 1527.019953] env[63371]: _type = "Task" [ 1527.019953] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.029972] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774091, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.073293] env[63371]: DEBUG nova.scheduler.client.report [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1527.109177] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d460c6-3c1c-b2e9-4a3b-f0044a6d8355, 'name': SearchDatastore_Task, 'duration_secs': 0.009099} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.109467] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1527.109791] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1527.110085] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c4fc0625-b806-4df5-9a42-41246a77dc8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.118841] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1527.118841] env[63371]: value = "task-1774092" [ 1527.118841] env[63371]: _type = "Task" [ 1527.118841] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.130772] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774092, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.228846] env[63371]: DEBUG oslo_vmware.api [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1774090, 'name': PowerOffVM_Task, 'duration_secs': 0.511639} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.228846] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1527.228846] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1527.228846] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53c8ccd3-062b-4d4e-b34e-9444ed63cd3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.308163] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1527.308163] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1527.308163] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Deleting the datastore file [datastore1] e0369f27-68ea-49c4-8524-3dbbb3cde96e {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1527.308443] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c0ee9f3-e63e-49c2-ab2c-db5ef71152ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.323526] env[63371]: DEBUG oslo_vmware.api [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] 
Waiting for the task: (returnval){ [ 1527.323526] env[63371]: value = "task-1774094" [ 1527.323526] env[63371]: _type = "Task" [ 1527.323526] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.337092] env[63371]: DEBUG oslo_vmware.api [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1774094, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.341265] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774088, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.513850] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1527.513850] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1527.513850] env[63371]: DEBUG nova.network.neutron [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1527.535950] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774091, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.580643] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.712s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1527.580643] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1527.583972] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.165s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1527.584274] env[63371]: DEBUG nova.objects.instance [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lazy-loading 'resources' on Instance uuid fb2ddd3e-7adc-4a34-8797-0e98fdf19379 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1527.629627] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774092, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1527.842894] env[63371]: DEBUG oslo_vmware.api [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Task: {'id': task-1774094, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.445786} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.847049] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1527.847049] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1527.847049] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1527.847049] env[63371]: INFO nova.compute.manager [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1527.847049] env[63371]: DEBUG oslo.service.loopingcall [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1527.847296] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774088, 'name': ReconfigVM_Task, 'duration_secs': 1.022792} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1527.847492] env[63371]: DEBUG nova.compute.manager [-] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1527.847633] env[63371]: DEBUG nova.network.neutron [-] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1527.849270] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Reconfigured VM instance instance-00000034 to attach disk [datastore1] 195de525-1081-4db6-acf3-04a6d3eb142f/ephemeral_0.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1527.849970] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-186be719-b11c-4f6b-9b7a-86a0ff5c3c5d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1527.857219] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1527.857219] env[63371]: value = "task-1774095" [ 1527.857219] env[63371]: _type = "Task" [ 1527.857219] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1527.869049] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774095, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.032360] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774091, 'name': ReconfigVM_Task, 'duration_secs': 0.707711} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.032578] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 0e2c8ced-198f-43be-9d41-703a7c590df4/0e2c8ced-198f-43be-9d41-703a7c590df4.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1528.033259] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4303afad-de5b-4d51-a8c8-41987e746637 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.040776] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1528.040776] env[63371]: value = "task-1774096" [ 1528.040776] env[63371]: _type = "Task" [ 1528.040776] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.051024] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774096, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.062873] env[63371]: DEBUG nova.network.neutron [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1528.095264] env[63371]: DEBUG nova.compute.utils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1528.099623] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1528.099800] env[63371]: DEBUG nova.network.neutron [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1528.139155] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774092, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.587414} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.139370] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1528.139585] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1528.139860] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1fe560a-5f53-4e9e-87fe-a819fe10b759 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.146371] env[63371]: DEBUG nova.network.neutron [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Updated VIF entry in instance network info cache for port 12bfc72d-5ca7-4f11-8259-77887b5af47c. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1528.146706] env[63371]: DEBUG nova.network.neutron [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Updating instance_info_cache with network_info: [{"id": "12bfc72d-5ca7-4f11-8259-77887b5af47c", "address": "fa:16:3e:9e:61:49", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap12bfc72d-5c", "ovs_interfaceid": "12bfc72d-5ca7-4f11-8259-77887b5af47c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.153564] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1528.153564] env[63371]: value = "task-1774097" [ 1528.153564] env[63371]: _type = "Task" [ 1528.153564] env[63371]: } to 
complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.171636] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774097, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.195917] env[63371]: DEBUG nova.policy [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c37f7c756994f8587c0ff8c0b2b6c43', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fe8537857034ada970b516fcf2fce57', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1528.371906] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774095, 'name': Rename_Task, 'duration_secs': 0.340535} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.376081] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1528.376408] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6eafe1cc-9977-4697-b0a5-39b5b74603d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.385295] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1528.385295] env[63371]: value = "task-1774098" [ 1528.385295] env[63371]: _type = "Task" [ 1528.385295] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.397058] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774098, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.556476] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774096, 'name': Rename_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.563843] env[63371]: DEBUG nova.network.neutron [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Updating instance_info_cache with network_info: [{"id": "cf8050ea-381c-487b-9981-c3f042d673e1", "address": "fa:16:3e:86:04:09", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8050ea-38", "ovs_interfaceid": "cf8050ea-381c-487b-9981-c3f042d673e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1528.607528] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1528.618484] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2c56ffc-2ee8-452a-b37c-160b2f82fb02 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.627825] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5bd3aec-a7c9-4ba5-bb5c-1d65b667b0cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.668840] env[63371]: DEBUG oslo_concurrency.lockutils [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] Releasing lock "refresh_cache-574121c4-c721-4d30-81ec-3f2310a7b6d1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1528.669470] env[63371]: DEBUG nova.compute.manager [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Received event network-changed-01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1528.669470] env[63371]: DEBUG nova.compute.manager [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Refreshing instance network info cache due to event network-changed-01b878e5-651e-49f1-959f-7da17291c0bc. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1528.669596] env[63371]: DEBUG oslo_concurrency.lockutils [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] Acquiring lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.670241] env[63371]: DEBUG oslo_concurrency.lockutils [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] Acquired lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1528.670241] env[63371]: DEBUG nova.network.neutron [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Refreshing network info cache for port 01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1528.678029] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3ab504-df56-4134-96a1-d083c5cc2468 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.687855] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774097, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065576} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1528.688740] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1528.689661] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc9ac2d1-dad4-4977-87a9-5c40ee312402 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.693622] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0dc4f7f-3976-4a12-a850-62daddd68ef7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.721416] env[63371]: DEBUG nova.compute.provider_tree [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1528.731770] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1528.733302] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7665183f-6eb1-4fbb-83cc-6c4cfa74817d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1528.748600] env[63371]: DEBUG nova.compute.manager [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Received event network-vif-plugged-cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1528.748805] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] Acquiring lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.749079] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] Lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.749210] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] Lock 
"150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1528.749373] env[63371]: DEBUG nova.compute.manager [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] No waiting events found dispatching network-vif-plugged-cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1528.749533] env[63371]: WARNING nova.compute.manager [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Received unexpected event network-vif-plugged-cf8050ea-381c-487b-9981-c3f042d673e1 for instance with vm_state building and task_state spawning. [ 1528.749689] env[63371]: DEBUG nova.compute.manager [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Received event network-changed-cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1528.749846] env[63371]: DEBUG nova.compute.manager [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Refreshing instance network info cache due to event network-changed-cf8050ea-381c-487b-9981-c3f042d673e1. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1528.750020] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] Acquiring lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1528.759075] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1528.759075] env[63371]: value = "task-1774099" [ 1528.759075] env[63371]: _type = "Task" [ 1528.759075] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1528.768481] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774099, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.888601] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "0c9156ea-81c4-4286-a20b-66068a5bce59" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1528.888866] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "0c9156ea-81c4-4286-a20b-66068a5bce59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1528.901540] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774098, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1528.914844] env[63371]: DEBUG nova.network.neutron [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Successfully created port: dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1529.053459] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774096, 'name': Rename_Task, 'duration_secs': 0.960468} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.053762] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1529.054174] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef1ce9dd-f84c-4a69-b784-0ead5dc74e2f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.063039] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1529.063039] env[63371]: value = "task-1774100" [ 1529.063039] env[63371]: _type = "Task" [ 1529.063039] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.071041] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1529.073415] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Instance network_info: |[{"id": "cf8050ea-381c-487b-9981-c3f042d673e1", "address": "fa:16:3e:86:04:09", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8050ea-38", "ovs_interfaceid": "cf8050ea-381c-487b-9981-c3f042d673e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1529.073706] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774100, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.073945] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] Acquired lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.074129] env[63371]: DEBUG nova.network.neutron [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Refreshing network info cache for port cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1529.075456] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:86:04:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf8050ea-381c-487b-9981-c3f042d673e1', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1529.086070] env[63371]: DEBUG oslo.service.loopingcall [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1529.087141] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1529.087430] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ef85f9d-34ca-4446-859d-5f200b06f731 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.102097] env[63371]: DEBUG nova.network.neutron [-] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.109461] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1529.109461] env[63371]: value = "task-1774101" [ 1529.109461] env[63371]: _type = "Task" [ 1529.109461] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.125187] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774101, 'name': CreateVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.219396] env[63371]: DEBUG nova.network.neutron [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Successfully updated port: f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1529.236704] env[63371]: DEBUG nova.scheduler.client.report [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1529.269761] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774099, 'name': ReconfigVM_Task, 'duration_secs': 0.492686} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.270067] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1529.271163] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0c14d6ef-2447-4e27-b817-6ee3407a2bb3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.277043] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1529.277043] env[63371]: value = "task-1774102" [ 1529.277043] env[63371]: _type = "Task" [ 1529.277043] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.285736] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774102, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.399109] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774098, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.537999] env[63371]: DEBUG nova.network.neutron [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updated VIF entry in instance network info cache for port 01b878e5-651e-49f1-959f-7da17291c0bc. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1529.538438] env[63371]: DEBUG nova.network.neutron [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [{"id": "01b878e5-651e-49f1-959f-7da17291c0bc", "address": "fa:16:3e:b7:c4:0c", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b878e5-65", "ovs_interfaceid": "01b878e5-651e-49f1-959f-7da17291c0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.577453] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774100, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.604888] env[63371]: INFO nova.compute.manager [-] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Took 1.76 seconds to deallocate network for instance. [ 1529.622042] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1529.624068] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774101, 'name': CreateVM_Task, 'duration_secs': 0.475626} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.626849] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1529.627777] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.627777] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.628072] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1529.628790] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c897c89-4475-4eb4-89a7-c43b1ca21edd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.634178] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1529.634178] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c0779c-67cb-ed77-47c4-c7b8bee69ee9" [ 1529.634178] env[63371]: _type = "Task" [ 1529.634178] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.646209] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c0779c-67cb-ed77-47c4-c7b8bee69ee9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.651939] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1529.652255] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1529.652377] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1529.652626] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1529.652684] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1529.652793] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1529.653022] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1529.653179] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1529.653347] env[63371]: DEBUG 
nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1529.653516] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1529.653639] env[63371]: DEBUG nova.virt.hardware [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1529.654509] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c227898a-f21f-4440-ae82-e2d629f3cdfb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.666031] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b58798-f3e8-4113-9de8-7281a1d64e97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.722960] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1529.723201] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1529.723349] env[63371]: DEBUG nova.network.neutron [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1529.742445] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.158s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1529.745747] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.055s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1529.748243] 
env[63371]: DEBUG nova.objects.instance [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lazy-loading 'resources' on Instance uuid 36b81143-211f-4c77-854b-abe0d3f39ce4 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1529.776380] env[63371]: INFO nova.scheduler.client.report [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleted allocations for instance fb2ddd3e-7adc-4a34-8797-0e98fdf19379 [ 1529.790411] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774102, 'name': Rename_Task, 'duration_secs': 0.258587} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.790963] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1529.791481] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5bb63233-8775-4a70-b384-9b4d3443b5d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1529.799943] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1529.799943] env[63371]: value = "task-1774103" [ 1529.799943] env[63371]: _type = "Task" [ 1529.799943] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1529.809022] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774103, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1529.897617] env[63371]: DEBUG oslo_vmware.api [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774098, 'name': PowerOnVM_Task, 'duration_secs': 1.098409} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1529.898513] env[63371]: DEBUG nova.network.neutron [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Updated VIF entry in instance network info cache for port cf8050ea-381c-487b-9981-c3f042d673e1. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1529.902024] env[63371]: DEBUG nova.network.neutron [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Updating instance_info_cache with network_info: [{"id": "cf8050ea-381c-487b-9981-c3f042d673e1", "address": "fa:16:3e:86:04:09", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8050ea-38", "ovs_interfaceid": "cf8050ea-381c-487b-9981-c3f042d673e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1529.902024] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1529.902024] env[63371]: INFO nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Took 13.33 seconds to spawn the instance on the hypervisor. 
[ 1529.902024] env[63371]: DEBUG nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1529.902024] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aab6e9a-517d-42fb-8775-7604106ccc22 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.041662] env[63371]: DEBUG oslo_concurrency.lockutils [req-43883ffb-d84a-4c87-a3bd-17ecaf21573f req-93127c69-de93-445f-8635-f1417c143bee service nova] Releasing lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.075454] env[63371]: DEBUG oslo_vmware.api [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774100, 'name': PowerOnVM_Task, 'duration_secs': 0.597983} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.075740] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1530.075943] env[63371]: INFO nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Took 11.32 seconds to spawn the instance on the hypervisor. [ 1530.076127] env[63371]: DEBUG nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1530.077048] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0825b0-7b80-4f5b-9af4-64b3790042ff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.115853] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.145861] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c0779c-67cb-ed77-47c4-c7b8bee69ee9, 'name': SearchDatastore_Task, 'duration_secs': 0.012487} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.146102] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.146345] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1530.146619] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.146778] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.146956] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1530.147232] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-738cf14a-face-4d72-9355-56364c8ac299 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.156483] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1530.156675] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1530.157443] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d31367f7-f75c-4e51-a45a-d269fd7cfa85 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.167957] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1530.167957] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52426b06-3cea-6015-c10d-feea60e42a99" [ 1530.167957] env[63371]: _type = "Task" [ 1530.167957] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.178229] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52426b06-3cea-6015-c10d-feea60e42a99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.278482] env[63371]: DEBUG nova.network.neutron [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1530.287928] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c711477-e2d9-4638-9c5f-b65f8935e1a6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "fb2ddd3e-7adc-4a34-8797-0e98fdf19379" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.920s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.315721] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774103, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.405702] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7a10335-e1be-4e25-b09d-c47291fc9011 req-a5d1cf8a-45f8-40e2-8642-684010925214 service nova] Releasing lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1530.422324] env[63371]: INFO nova.compute.manager [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Took 52.81 seconds to build instance. 
[ 1530.436403] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquiring lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.436849] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.495647] env[63371]: DEBUG nova.network.neutron [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Updating instance_info_cache with network_info: [{"id": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "address": "fa:16:3e:1a:60:7b", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7958f5c-d0", "ovs_interfaceid": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1530.600247] env[63371]: INFO nova.compute.manager [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Took 49.66 seconds to build instance. 
[ 1530.610447] env[63371]: DEBUG nova.compute.manager [req-3cda18d2-5301-4642-984a-39a55e40f7bd req-5b6faff2-809d-4d19-832d-9ca9709c4ff7 service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Received event network-vif-plugged-dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1530.610699] env[63371]: DEBUG oslo_concurrency.lockutils [req-3cda18d2-5301-4642-984a-39a55e40f7bd req-5b6faff2-809d-4d19-832d-9ca9709c4ff7 service nova] Acquiring lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1530.611321] env[63371]: DEBUG oslo_concurrency.lockutils [req-3cda18d2-5301-4642-984a-39a55e40f7bd req-5b6faff2-809d-4d19-832d-9ca9709c4ff7 service nova] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1530.611321] env[63371]: DEBUG oslo_concurrency.lockutils [req-3cda18d2-5301-4642-984a-39a55e40f7bd req-5b6faff2-809d-4d19-832d-9ca9709c4ff7 service nova] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.611321] env[63371]: DEBUG nova.compute.manager [req-3cda18d2-5301-4642-984a-39a55e40f7bd req-5b6faff2-809d-4d19-832d-9ca9709c4ff7 service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] No waiting events found dispatching network-vif-plugged-dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1530.611802] env[63371]: WARNING nova.compute.manager [req-3cda18d2-5301-4642-984a-39a55e40f7bd req-5b6faff2-809d-4d19-832d-9ca9709c4ff7 service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Received unexpected event network-vif-plugged-dc1a6185-a139-4788-bbd2-d5540dd42733 for instance with vm_state building and task_state spawning. [ 1530.684139] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52426b06-3cea-6015-c10d-feea60e42a99, 'name': SearchDatastore_Task, 'duration_secs': 0.009674} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.684992] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1df0ac45-7aa6-4ebf-9632-68bebf6dee64 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.694792] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1530.694792] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524517d3-d158-8f84-967c-8f1ff597c474" [ 1530.694792] env[63371]: _type = "Task" [ 1530.694792] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1530.704925] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524517d3-d158-8f84-967c-8f1ff597c474, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1530.758632] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Received event network-changed-a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1530.758869] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Refreshing instance network info cache due to event network-changed-a7788c55-6aa0-4056-b8d1-cff8ad8951f7. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1530.759206] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Acquiring lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1530.759252] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Acquired lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1530.759404] env[63371]: DEBUG nova.network.neutron [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Refreshing network info cache for port a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1530.784191] env[63371]: DEBUG nova.network.neutron [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Successfully updated port: dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1530.812079] env[63371]: DEBUG oslo_vmware.api [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774103, 'name': PowerOnVM_Task, 'duration_secs': 0.895627} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1530.812376] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1530.812578] env[63371]: INFO nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Took 9.33 seconds to spawn the instance on the hypervisor. [ 1530.812748] env[63371]: DEBUG nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1530.816164] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304c5d62-3be8-48f0-93b6-c0c477b5f903 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.838285] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af11eea-4a91-4d72-88f3-76cf7e8e08db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.847303] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4219e446-75fd-4045-8921-18e6dfec71f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.887027] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e9ee2c6-3ab0-4231-b8b1-194c25ce81aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.896284] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52265e59-636d-4439-a0fd-8c75720a218a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1530.913923] env[63371]: DEBUG nova.compute.provider_tree [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1530.927279] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b1882d00-c79e-4cf0-ba53-39d1c52d442b tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "195de525-1081-4db6-acf3-04a6d3eb142f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.254s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1530.999667] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 
tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.001268] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Instance network_info: |[{"id": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "address": "fa:16:3e:1a:60:7b", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7958f5c-d0", "ovs_interfaceid": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1531.001268] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:60:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7958f5c-d0af-44e7-bbb2-e6fa265a6da3', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1531.011217] env[63371]: DEBUG oslo.service.loopingcall [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1531.011462] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1531.011685] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2b90798-7a80-4594-8fa4-9e623f36fb44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.034230] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1531.034230] env[63371]: value = "task-1774104" [ 1531.034230] env[63371]: _type = "Task" [ 1531.034230] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.043079] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774104, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.103047] env[63371]: DEBUG oslo_concurrency.lockutils [None req-34daa863-46a7-4631-9097-d3515766eb1b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.993s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.210677] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524517d3-d158-8f84-967c-8f1ff597c474, 'name': SearchDatastore_Task, 'duration_secs': 0.009977} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.211088] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1531.211370] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed/150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1531.211776] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33f71057-f921-4498-9167-2dbbb77cd9f5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.219147] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1531.219147] env[63371]: value = "task-1774105" [ 1531.219147] env[63371]: _type = "Task" [ 1531.219147] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.226977] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774105, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.286508] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "refresh_cache-d6bc618e-33c9-4b45-b79f-afe6811acd4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1531.286699] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquired lock "refresh_cache-d6bc618e-33c9-4b45-b79f-afe6811acd4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1531.286890] env[63371]: DEBUG nova.network.neutron [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1531.341860] env[63371]: INFO nova.compute.manager [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Took 47.72 seconds to build instance. [ 1531.416717] env[63371]: DEBUG nova.scheduler.client.report [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1531.430693] env[63371]: DEBUG nova.compute.manager [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1531.546192] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774104, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.606516] env[63371]: DEBUG nova.compute.manager [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1531.728877] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774105, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497648} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1531.731211] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed/150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1531.731455] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1531.731733] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b1744d3-c69d-49d3-98ef-9ba614d6066e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.735209] env[63371]: DEBUG nova.network.neutron [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updated VIF entry in instance network info cache for port a7788c55-6aa0-4056-b8d1-cff8ad8951f7. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1531.735286] env[63371]: DEBUG nova.network.neutron [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updating instance_info_cache with network_info: [{"id": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "address": "fa:16:3e:8d:c6:ca", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7788c55-6a", "ovs_interfaceid": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1531.738237] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1531.738237] env[63371]: value = 
"task-1774106" [ 1531.738237] env[63371]: _type = "Task" [ 1531.738237] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.746055] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774106, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.814855] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "0e2c8ced-198f-43be-9d41-703a7c590df4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.814855] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.814855] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "0e2c8ced-198f-43be-9d41-703a7c590df4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1531.814855] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.814855] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.815497] env[63371]: INFO nova.compute.manager [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Terminating instance [ 1531.818707] env[63371]: DEBUG nova.compute.manager [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1531.819053] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1531.819976] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62700eff-ddeb-432f-8f67-1b501881b2c7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.828586] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1531.829205] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9c45d8c2-8820-4f93-81e0-d6ae0dd75739 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1531.837191] env[63371]: DEBUG nova.network.neutron [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1531.838502] env[63371]: DEBUG oslo_vmware.api [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: (returnval){ [ 1531.838502] env[63371]: value = "task-1774107" [ 1531.838502] env[63371]: _type = "Task" [ 1531.838502] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1531.848696] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2653379e-ac1c-43dc-ab8f-de87bcc00d73 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.066s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.849166] env[63371]: DEBUG oslo_vmware.api [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774107, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1531.925274] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.179s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1531.927270] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.176s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1531.929128] env[63371]: INFO nova.compute.claims [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1531.948697] env[63371]: INFO nova.scheduler.client.report [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Deleted allocations for instance 36b81143-211f-4c77-854b-abe0d3f39ce4 [ 1531.962615] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.021385] env[63371]: DEBUG nova.network.neutron [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Updating instance_info_cache with network_info: [{"id": "dc1a6185-a139-4788-bbd2-d5540dd42733", "address": "fa:16:3e:ec:b8:32", "network": {"id": "6f1d0296-7fae-445e-95cd-2ec84607e8cb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1953892032-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe8537857034ada970b516fcf2fce57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc1a6185-a1", "ovs_interfaceid": "dc1a6185-a139-4788-bbd2-d5540dd42733", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1532.048028] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774104, 'name': CreateVM_Task, 'duration_secs': 0.522182} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.048028] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1532.048746] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.048854] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.049951] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1532.049951] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f3fabe0-ab7c-4e72-a131-9acb9d91f534 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.054727] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1532.054727] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]527f3e64-a77e-3b37-dfd6-dbe8fc4acd15" [ 1532.054727] env[63371]: _type = "Task" [ 1532.054727] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.063807] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527f3e64-a77e-3b37-dfd6-dbe8fc4acd15, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.146415] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.237433] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Releasing lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.237793] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Received event network-vif-deleted-fcd67cd5-500d-457a-9bbb-655583d97dd2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1532.237885] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Received event network-vif-plugged-f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1532.238084] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Acquiring lock "1c93487b-6d8f-424d-8b95-10bfb894c609-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1532.238306] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Lock "1c93487b-6d8f-424d-8b95-10bfb894c609-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1532.238474] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Lock "1c93487b-6d8f-424d-8b95-10bfb894c609-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.238632] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] No waiting events found dispatching network-vif-plugged-f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1532.238825] env[63371]: WARNING nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Received unexpected event network-vif-plugged-f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 for instance with vm_state building and task_state spawning. 
[ 1532.238986] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Received event network-changed-f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1532.239209] env[63371]: DEBUG nova.compute.manager [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Refreshing instance network info cache due to event network-changed-f7958f5c-d0af-44e7-bbb2-e6fa265a6da3. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1532.239330] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Acquiring lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.239466] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Acquired lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.239617] env[63371]: DEBUG nova.network.neutron [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Refreshing network info cache for port f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1532.250561] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774106, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072946} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.250856] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1532.251630] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f70c889-3fd0-4bd8-b843-c0b93b5eaa7b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.276379] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed/150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1532.276960] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b8b27bc-cb2b-476c-9885-4cf2c5bd4317 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.296564] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1532.296564] env[63371]: value = "task-1774108" [ 1532.296564] env[63371]: _type = "Task" [ 1532.296564] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.305577] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774108, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.348358] env[63371]: DEBUG oslo_vmware.api [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774107, 'name': PowerOffVM_Task, 'duration_secs': 0.286782} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.348634] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1532.348833] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1532.349116] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9c6ae630-5fc9-4b54-9569-9bfab712e9f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.352187] env[63371]: DEBUG nova.compute.manager [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1532.456797] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85a8ef55-8515-4905-a2be-0141616022e6 tempest-ListServersNegativeTestJSON-1191291629 tempest-ListServersNegativeTestJSON-1191291629-project-member] Lock "36b81143-211f-4c77-854b-abe0d3f39ce4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.902s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1532.480583] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1532.480862] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1532.481044] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Deleting the datastore file [datastore1] 0e2c8ced-198f-43be-9d41-703a7c590df4 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1532.481305] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c96bbfbf-a248-4e09-8ee5-9866afffbd26 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.488853] env[63371]: DEBUG oslo_vmware.api [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for the task: 
(returnval){ [ 1532.488853] env[63371]: value = "task-1774110" [ 1532.488853] env[63371]: _type = "Task" [ 1532.488853] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.496968] env[63371]: DEBUG oslo_vmware.api [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774110, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.524300] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Releasing lock "refresh_cache-d6bc618e-33c9-4b45-b79f-afe6811acd4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.524562] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Instance network_info: |[{"id": "dc1a6185-a139-4788-bbd2-d5540dd42733", "address": "fa:16:3e:ec:b8:32", "network": {"id": "6f1d0296-7fae-445e-95cd-2ec84607e8cb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1953892032-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe8537857034ada970b516fcf2fce57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc1a6185-a1", "ovs_interfaceid": "dc1a6185-a139-4788-bbd2-d5540dd42733", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1532.524973] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:b8:32', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd33839ae-40ca-471b-92e3-eb282b920682', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dc1a6185-a139-4788-bbd2-d5540dd42733', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1532.532637] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Creating folder: Project (3fe8537857034ada970b516fcf2fce57). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1532.532909] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-809c908a-6c5f-4930-aaee-05923eb0b58f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.543495] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Created folder: Project (3fe8537857034ada970b516fcf2fce57) in parent group-v368199. [ 1532.543675] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Creating folder: Instances. Parent ref: group-v368359. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1532.543910] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2d50a30-25b2-4b32-8a3a-e3804fca4624 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.553213] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Created folder: Instances in parent group-v368359. [ 1532.553491] env[63371]: DEBUG oslo.service.loopingcall [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1532.553694] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1532.553938] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5930d71-9d18-47ee-96dd-27d319d61dd0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.580207] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527f3e64-a77e-3b37-dfd6-dbe8fc4acd15, 'name': SearchDatastore_Task, 'duration_secs': 0.012336} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1532.581519] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1532.581762] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1532.581994] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.582161] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.582335] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1532.582568] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1532.582568] env[63371]: value = "task-1774113" [ 1532.582568] env[63371]: _type = "Task" [ 1532.582568] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.582742] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c5bcc0c-a561-48b3-b5db-2573de0810d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.592662] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774113, 'name': CreateVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.594029] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1532.594029] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1532.594677] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8549fa25-26c3-42e2-9082-ce545ebfced4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.599670] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1532.599670] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525e6f78-8c41-ff53-ed62-638ca80e7c15" [ 1532.599670] env[63371]: _type = "Task" [ 1532.599670] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1532.607942] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525e6f78-8c41-ff53-ed62-638ca80e7c15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.631742] env[63371]: INFO nova.compute.manager [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Rebuilding instance [ 1532.676303] env[63371]: DEBUG nova.compute.manager [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1532.676525] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685d9f79-cc20-4a26-bb64-774e95cdb1aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1532.715434] env[63371]: DEBUG nova.compute.manager [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Received event network-changed-dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1532.715644] env[63371]: DEBUG nova.compute.manager [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Refreshing instance network info cache due to event network-changed-dc1a6185-a139-4788-bbd2-d5540dd42733. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1532.716088] env[63371]: DEBUG oslo_concurrency.lockutils [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] Acquiring lock "refresh_cache-d6bc618e-33c9-4b45-b79f-afe6811acd4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1532.716088] env[63371]: DEBUG oslo_concurrency.lockutils [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] Acquired lock "refresh_cache-d6bc618e-33c9-4b45-b79f-afe6811acd4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1532.716216] env[63371]: DEBUG nova.network.neutron [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Refreshing network info cache for port dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1532.811880] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774108, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1532.882214] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1533.013444] env[63371]: DEBUG oslo_vmware.api [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Task: {'id': task-1774110, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14331} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.013444] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1533.013444] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1533.013444] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1533.013444] env[63371]: INFO nova.compute.manager [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1533.013444] env[63371]: DEBUG oslo.service.loopingcall [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1533.013444] env[63371]: DEBUG nova.compute.manager [-] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1533.013444] env[63371]: DEBUG nova.network.neutron [-] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1533.052813] env[63371]: DEBUG nova.network.neutron [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Updated VIF entry in instance network info cache for port f7958f5c-d0af-44e7-bbb2-e6fa265a6da3. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1533.052813] env[63371]: DEBUG nova.network.neutron [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Updating instance_info_cache with network_info: [{"id": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "address": "fa:16:3e:1a:60:7b", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7958f5c-d0", "ovs_interfaceid": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.098903] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774113, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.114059] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525e6f78-8c41-ff53-ed62-638ca80e7c15, 'name': SearchDatastore_Task, 'duration_secs': 0.009133} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.114888] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-649afccd-c29d-425b-bd1f-7c4d33e9d4e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.122946] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1533.122946] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524a6eb1-03ae-b3b2-5115-a962e877dad7" [ 1533.122946] env[63371]: _type = "Task" [ 1533.122946] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.132204] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524a6eb1-03ae-b3b2-5115-a962e877dad7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.187608] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1533.191403] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67fa86c4-8196-43c7-9f6b-fa1838db2d04 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.199723] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1533.199723] env[63371]: value = "task-1774114" [ 1533.199723] env[63371]: _type = "Task" [ 1533.199723] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.213063] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774114, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.311724] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774108, 'name': ReconfigVM_Task, 'duration_secs': 0.686826} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.311954] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed/150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1533.312620] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a03479b-ed88-4b85-bb78-fa6eea70ed76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.325206] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1533.325206] env[63371]: value = "task-1774115" [ 1533.325206] env[63371]: _type = "Task" [ 1533.325206] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.335923] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774115, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.427999] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b4ad38-eb86-4ab4-a530-9554a180af29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.442502] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531fc2b3-d088-429c-8cc8-f659d246986c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.477760] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8413822-50a4-4c92-a7b4-d9d61f5a39e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.488040] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927f0121-76f7-4671-8b93-a7f0a6d7349d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.509573] env[63371]: DEBUG nova.compute.provider_tree [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1533.541034] env[63371]: DEBUG nova.network.neutron [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Updated VIF entry in instance network info cache for port dc1a6185-a139-4788-bbd2-d5540dd42733. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1533.541156] env[63371]: DEBUG nova.network.neutron [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Updating instance_info_cache with network_info: [{"id": "dc1a6185-a139-4788-bbd2-d5540dd42733", "address": "fa:16:3e:ec:b8:32", "network": {"id": "6f1d0296-7fae-445e-95cd-2ec84607e8cb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1953892032-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fe8537857034ada970b516fcf2fce57", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d33839ae-40ca-471b-92e3-eb282b920682", "external-id": "nsx-vlan-transportzone-416", "segmentation_id": 416, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdc1a6185-a1", "ovs_interfaceid": "dc1a6185-a139-4788-bbd2-d5540dd42733", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.556885] env[63371]: DEBUG oslo_concurrency.lockutils [req-5ee86350-55bc-446b-9bb5-08238f3ba1a4 req-dc3cc438-d767-4cdb-a2a8-b01ce9b875e6 service nova] Releasing lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.595704] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774113, 'name': CreateVM_Task, 'duration_secs': 0.655521} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.595880] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1533.596754] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1533.596984] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1533.597341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1533.597638] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ed72527-067b-47a9-9842-ac6035c2022a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.602188] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1533.602188] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528eb6ea-b724-47c9-9838-b081a6550eca" [ 1533.602188] env[63371]: _type = "Task" [ 1533.602188] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.609703] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528eb6ea-b724-47c9-9838-b081a6550eca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.615554] env[63371]: DEBUG nova.compute.manager [req-cfa2c2c4-4ff3-430e-a3ad-5f7c3a7a8b07 req-8183c2ac-a7e1-453c-b1fe-a1a84484b139 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Received event network-vif-deleted-d11a5154-6b30-4190-925a-4a07bc31709e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1533.615741] env[63371]: INFO nova.compute.manager [req-cfa2c2c4-4ff3-430e-a3ad-5f7c3a7a8b07 req-8183c2ac-a7e1-453c-b1fe-a1a84484b139 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Neutron deleted interface d11a5154-6b30-4190-925a-4a07bc31709e; detaching it from the instance and deleting it from the info cache [ 1533.615905] env[63371]: DEBUG nova.network.neutron [req-cfa2c2c4-4ff3-430e-a3ad-5f7c3a7a8b07 req-8183c2ac-a7e1-453c-b1fe-a1a84484b139 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1533.634307] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524a6eb1-03ae-b3b2-5115-a962e877dad7, 'name': SearchDatastore_Task, 'duration_secs': 0.009419} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.634531] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1533.634817] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1c93487b-6d8f-424d-8b95-10bfb894c609/1c93487b-6d8f-424d-8b95-10bfb894c609.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1533.635103] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0627af8f-728c-4540-a351-0a0ae87796ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.642697] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1533.642697] env[63371]: value = "task-1774116" [ 1533.642697] env[63371]: _type = "Task" [ 1533.642697] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.651489] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774116, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.709671] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774114, 'name': PowerOffVM_Task, 'duration_secs': 0.195239} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.710337] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1533.710337] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1533.710969] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac0e34f-3eea-426d-99aa-f0be8aa101e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.717622] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1533.717853] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc5a9a0c-8190-4ca1-9b24-86123a98e099 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.791875] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1533.792198] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1533.792441] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleting the datastore file [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1 {{(pid=63371) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1533.792738] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fdc9ace-bdaf-4aa3-be10-319f234c56ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.799768] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1533.799768] env[63371]: value = "task-1774118" [ 1533.799768] env[63371]: _type = "Task" [ 1533.799768] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.808601] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774118, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.834764] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774115, 'name': Rename_Task, 'duration_secs': 0.202837} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1533.835120] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1533.835393] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d514c97-d3c5-458f-8bb6-154a42a2a013 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1533.842686] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1533.842686] env[63371]: value = "task-1774119" [ 1533.842686] env[63371]: _type = "Task" [ 1533.842686] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1533.851727] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774119, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1533.936468] env[63371]: DEBUG nova.network.neutron [-] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.020324] env[63371]: DEBUG nova.scheduler.client.report [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1534.044283] env[63371]: DEBUG oslo_concurrency.lockutils [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] Releasing lock "refresh_cache-d6bc618e-33c9-4b45-b79f-afe6811acd4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.044518] env[63371]: DEBUG nova.compute.manager [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Received event network-changed-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1534.044663] env[63371]: DEBUG nova.compute.manager [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Refreshing instance network info cache due to event network-changed-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1534.045089] env[63371]: DEBUG oslo_concurrency.lockutils [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] Acquiring lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.045089] env[63371]: DEBUG oslo_concurrency.lockutils [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] Acquired lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.045301] env[63371]: DEBUG nova.network.neutron [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Refreshing network info cache for port 8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1534.120136] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528eb6ea-b724-47c9-9838-b081a6550eca, 'name': SearchDatastore_Task, 'duration_secs': 0.009772} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.120136] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-daf0f2c6-7d27-485e-9abb-c0c926c99e40 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.121776] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.122026] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1534.122255] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1534.122392] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1534.122559] env[63371]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1534.122811] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4de41325-1d7d-489b-8c0d-2cff9a811639 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.132019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2672b46-209d-4371-9bce-b76eeb80f028 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.144568] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1534.145729] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1534.149026] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b6a0a50-6944-461c-9fe0-447001a43017 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.156258] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774116, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.468693} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.168685] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1c93487b-6d8f-424d-8b95-10bfb894c609/1c93487b-6d8f-424d-8b95-10bfb894c609.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1534.168928] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1534.169318] env[63371]: DEBUG nova.compute.manager [req-cfa2c2c4-4ff3-430e-a3ad-5f7c3a7a8b07 req-8183c2ac-a7e1-453c-b1fe-a1a84484b139 service nova] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Detach interface failed, port_id=d11a5154-6b30-4190-925a-4a07bc31709e, reason: Instance 0e2c8ced-198f-43be-9d41-703a7c590df4 could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1534.169778] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1534.169778] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52886e6c-f58a-edcc-d930-03a8683298ee" [ 1534.169778] env[63371]: _type = "Task" [ 1534.169778] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.169973] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aae3e331-3d85-4ae1-b33a-595aa2ae758f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.182291] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52886e6c-f58a-edcc-d930-03a8683298ee, 'name': SearchDatastore_Task, 'duration_secs': 0.01014} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.184034] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1534.184034] env[63371]: value = "task-1774120" [ 1534.184034] env[63371]: _type = "Task" [ 1534.184034] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.184245] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5ed4e2e-23f2-4231-b377-a2ef85ea8d83 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.191911] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1534.191911] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b898c5-abc0-2dea-a202-a0cb095ce410" [ 1534.191911] env[63371]: _type = "Task" [ 1534.191911] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.195314] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774120, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.203134] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b898c5-abc0-2dea-a202-a0cb095ce410, 'name': SearchDatastore_Task, 'duration_secs': 0.008783} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.203398] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1534.203650] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] d6bc618e-33c9-4b45-b79f-afe6811acd4e/d6bc618e-33c9-4b45-b79f-afe6811acd4e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1534.204107] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dbb4ebe8-771f-4873-936f-5e4637500ab0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.210670] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1534.210670] env[63371]: value = "task-1774121" [ 1534.210670] env[63371]: _type = "Task" [ 1534.210670] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.219074] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774121, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.309839] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.412171} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.310129] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1534.310349] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1534.310482] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1534.362695] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774119, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.440050] env[63371]: INFO nova.compute.manager [-] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Took 1.43 seconds to deallocate network for instance. [ 1534.531353] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.604s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1534.532026] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1534.537558] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 30.570s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1534.696651] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774120, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059131} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.696882] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1534.697984] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ee7de4-b741-42ab-ab2c-d10a2034b421 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.725393] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 1c93487b-6d8f-424d-8b95-10bfb894c609/1c93487b-6d8f-424d-8b95-10bfb894c609.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1534.734480] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5db3f33-992c-4310-a5b5-5240c3097655 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.757068] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774121, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478292} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.758857] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] d6bc618e-33c9-4b45-b79f-afe6811acd4e/d6bc618e-33c9-4b45-b79f-afe6811acd4e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1534.759176] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1534.759955] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1534.759955] env[63371]: value = "task-1774122" [ 1534.759955] env[63371]: _type = "Task" [ 1534.759955] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.760224] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c96f4941-029e-476f-b0f6-c7914d3dc308 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.772435] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774122, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.773771] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1534.773771] env[63371]: value = "task-1774123" [ 1534.773771] env[63371]: _type = "Task" [ 1534.773771] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1534.782538] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774123, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1534.858121] env[63371]: DEBUG oslo_vmware.api [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774119, 'name': PowerOnVM_Task, 'duration_secs': 0.542429} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1534.858611] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1534.858964] env[63371]: INFO nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Took 10.62 seconds to spawn the instance on the hypervisor. 
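The entries above all follow one oslo.vmware pattern: a vSphere *_Task method (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task) is invoked, and wait_for_task then polls it until the "progress is N%" messages end in "completed successfully". The short sketch below shows that invoke-then-poll pattern using oslo.vmware directly; the vCenter address, credentials, and the power-on example are placeholders for illustration, not values taken from this log.

# Invoke-then-poll sketch (assumes oslo.vmware is installed; details illustrative).
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Fetch a VM reference via the property collector (illustrative; assumes at
# least one VirtualMachine object exists in the inventory).
result = session.invoke_api(vim_util, 'get_objects', session.vim,
                            'VirtualMachine', 100)
vm_ref = result.objects[0].obj

# Start an asynchronous vSphere task, then block while oslo.vmware polls it.
# This is the same kind of activity the "_poll_task ... progress is N%"
# entries record; wait_for_task raises if the task ends in an error state.
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)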
[ 1534.859289] env[63371]: DEBUG nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1534.861127] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299f7e8a-7b30-432f-aef5-d75eab735331 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1534.914610] env[63371]: DEBUG nova.network.neutron [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Updated VIF entry in instance network info cache for port 8e9291f7-154c-4bfa-bfd8-f09dbd9b4963. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1534.915177] env[63371]: DEBUG nova.network.neutron [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Updating instance_info_cache with network_info: [{"id": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "address": "fa:16:3e:33:87:bc", "network": {"id": "7148c44a-bf33-4a93-a06b-2ca6d723c269", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-602295530-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.171", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b5c5bf80b8e64c8795da4d79d6a89150", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9d50784-eb90-48ae-a4ea-2125c52a50d7", "external-id": "nsx-vlan-transportzone-657", "segmentation_id": 657, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8e9291f7-15", "ovs_interfaceid": "8e9291f7-154c-4bfa-bfd8-f09dbd9b4963", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1534.946390] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1535.044018] env[63371]: DEBUG nova.compute.utils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1535.046414] env[63371]: INFO nova.compute.claims [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 
96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1535.050143] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1535.050143] env[63371]: DEBUG nova.network.neutron [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1535.115386] env[63371]: DEBUG nova.policy [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25f28e53648c41d1a147c1aa04f0a708', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fb0da840f6847f19f03a1db8a1c3f4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1535.272647] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774122, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.283024] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774123, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071083} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.283682] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1535.284178] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5515c089-1600-4491-b797-d5a7656535fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.312081] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] d6bc618e-33c9-4b45-b79f-afe6811acd4e/d6bc618e-33c9-4b45-b79f-afe6811acd4e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1535.312423] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-112c2c8f-0c36-4075-a8f9-f018f1467742 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.338658] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1535.338658] env[63371]: value = "task-1774124" [ 1535.338658] env[63371]: _type = "Task" [ 1535.338658] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.347667] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774124, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.363816] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1535.364068] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1535.364227] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1535.364402] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1535.364540] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1535.364681] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1535.364881] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1535.369410] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1535.369691] 
env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1535.369874] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1535.370086] env[63371]: DEBUG nova.virt.hardware [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1535.371018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0dd244f-ba12-451a-8e64-cfe864d42430 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.385618] env[63371]: INFO nova.compute.manager [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Took 47.26 seconds to build instance. [ 1535.387595] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3165c5dc-6355-4d35-acd9-41f0cb60d854 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.405189] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:61:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '12bfc72d-5ca7-4f11-8259-77887b5af47c', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1535.412774] env[63371]: DEBUG oslo.service.loopingcall [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1535.413756] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1535.413998] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-01638d82-cbf9-44aa-b3b4-04865c3e41a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.429260] env[63371]: DEBUG oslo_concurrency.lockutils [req-d45a5f3f-ee9b-41aa-95ef-657989f8c3d5 req-14f44164-1de4-494b-8602-c983a226403d service nova] Releasing lock "refresh_cache-195de525-1081-4db6-acf3-04a6d3eb142f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.436976] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1535.436976] env[63371]: value = "task-1774125" [ 1535.436976] env[63371]: _type = "Task" [ 1535.436976] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.443446] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774125, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.479728] env[63371]: DEBUG nova.network.neutron [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Successfully created port: d233c17c-a3d0-4e06-8087-721a7808298d {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1535.551333] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1535.560268] env[63371]: INFO nova.compute.resource_tracker [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating resource usage from migration 0d1a44d4-1ccf-4ed5-a60b-ac0e82931d09 [ 1535.774733] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774122, 'name': ReconfigVM_Task, 'duration_secs': 0.969393} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.775183] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 1c93487b-6d8f-424d-8b95-10bfb894c609/1c93487b-6d8f-424d-8b95-10bfb894c609.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1535.775664] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f2c3c69-8530-4d10-b492-8db7f00ddeb9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.781430] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1535.781430] env[63371]: value = "task-1774126" [ 1535.781430] env[63371]: _type = "Task" [ 1535.781430] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.789460] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774126, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.852215] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774124, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1535.893170] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f98215c-cc86-49b5-9a75-1e2f2f31f2f1 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.039s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1535.948321] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774125, 'name': CreateVM_Task, 'duration_secs': 0.343543} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.950764] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1535.951861] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.952873] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.952873] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1535.952873] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58304dd6-87dc-4e3f-9eab-04307766c671 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.957968] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1535.957968] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528e388e-9283-6131-6da8-32ec00f7061e" [ 1535.957968] env[63371]: _type = "Task" [ 1535.957968] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.970075] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528e388e-9283-6131-6da8-32ec00f7061e, 'name': SearchDatastore_Task, 'duration_secs': 0.008953} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.970477] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1535.970559] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1535.970782] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1535.973211] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1535.973211] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1535.973211] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00aa3121-9cc8-45fa-b339-d6940110fc08 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.978914] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1535.980293] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1535.980293] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40fef0f1-a5c2-4fbb-842f-c6b0df6fe489 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.986816] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1535.986816] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528ef330-659b-114d-43aa-5ae4f31fb44b" [ 1535.986816] env[63371]: _type = "Task" [ 1535.986816] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1535.989119] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42107181-9fe2-4e33-9505-b00eb2d9ddcb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1535.996422] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528ef330-659b-114d-43aa-5ae4f31fb44b, 'name': SearchDatastore_Task, 'duration_secs': 0.009241} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1535.998740] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c99d0a8b-0df9-4066-a9c9-0519afc74921 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.001845] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62c6f5f7-770e-4e19-9b61-e05aac6a4066 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.007849] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1536.007849] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c99632-d529-a0d2-d559-d322e89317d8" [ 1536.007849] env[63371]: _type = "Task" [ 1536.007849] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.035748] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f3e366-0a73-416d-b6e3-ca3a27952033 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.045846] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c99632-d529-a0d2-d559-d322e89317d8, 'name': SearchDatastore_Task, 'duration_secs': 0.00847} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.046389] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1536.046738] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1536.048153] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c94209-c881-4ec4-92da-2e42b5985339 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.053353] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-784cdd07-70b7-4c15-a648-a28547329e74 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.069482] env[63371]: DEBUG nova.compute.provider_tree [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1536.071861] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1536.071861] env[63371]: value = "task-1774127" [ 1536.071861] env[63371]: _type = "Task" [ 1536.071861] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.081889] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774127, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.292875] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774126, 'name': Rename_Task, 'duration_secs': 0.297282} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.293224] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1536.293486] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e09bb18-55e3-48a7-a97a-cfa47f6a7390 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.300583] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1536.300583] env[63371]: value = "task-1774128" [ 1536.300583] env[63371]: _type = "Task" [ 1536.300583] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.313628] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774128, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.352053] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774124, 'name': ReconfigVM_Task, 'duration_secs': 0.569045} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.352426] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Reconfigured VM instance instance-00000039 to attach disk [datastore1] d6bc618e-33c9-4b45-b79f-afe6811acd4e/d6bc618e-33c9-4b45-b79f-afe6811acd4e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1536.353328] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d50c43f7-3d0f-4ef1-82c7-0ff797c6cf1c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.361267] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1536.361267] env[63371]: value = "task-1774129" [ 1536.361267] env[63371]: _type = "Task" [ 1536.361267] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.374385] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774129, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.397172] env[63371]: DEBUG nova.compute.manager [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1536.574079] env[63371]: DEBUG nova.scheduler.client.report [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1536.579692] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1536.601924] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774127, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532993} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.602289] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1536.602511] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1536.605723] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9635367a-1640-4a35-a53c-85f10971897a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.611462] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1536.611462] env[63371]: value = "task-1774130" [ 1536.611462] env[63371]: _type = "Task" [ 1536.611462] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.616589] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1536.616816] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1536.616969] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1536.617195] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1536.617343] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1536.617488] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1536.617691] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1536.617842] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1536.618011] env[63371]: DEBUG 
nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1536.618179] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1536.618345] env[63371]: DEBUG nova.virt.hardware [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1536.619521] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d98f21-9ad2-4c88-a8bb-3646645da778 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.628515] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774130, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.631666] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25f07be2-69ba-4e97-8d1d-26c775b96a43 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.812018] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774128, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.871311] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774129, 'name': Rename_Task, 'duration_secs': 0.304137} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1536.871690] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1536.871926] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3dd5803-887f-4e57-a80b-397bdb571a03 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1536.879582] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1536.879582] env[63371]: value = "task-1774131" [ 1536.879582] env[63371]: _type = "Task" [ 1536.879582] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1536.888709] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774131, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1536.923637] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1537.023069] env[63371]: DEBUG nova.compute.manager [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Received event network-changed-cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1537.023317] env[63371]: DEBUG nova.compute.manager [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Refreshing instance network info cache due to event network-changed-cf8050ea-381c-487b-9981-c3f042d673e1. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1537.023559] env[63371]: DEBUG oslo_concurrency.lockutils [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] Acquiring lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.023769] env[63371]: DEBUG oslo_concurrency.lockutils [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] Acquired lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.023878] env[63371]: DEBUG nova.network.neutron [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Refreshing network info cache for port cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1537.091028] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.551s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1537.091028] env[63371]: INFO nova.compute.manager [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Migrating [ 1537.098238] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.246s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1537.098715] env[63371]: DEBUG nova.objects.instance [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lazy-loading 'resources' on Instance uuid f8119ade-7018-4ad8-82fe-baa0a6753c64 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1537.127269] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774130, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062466} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.128164] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1537.129731] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c2cde4-6206-4f0e-8844-35e2e59f68c8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.161085] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1537.162433] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8595e640-e338-4caf-9a2a-8681e2ed6e52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.185470] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1537.185470] env[63371]: value = "task-1774132" [ 1537.185470] env[63371]: _type = "Task" [ 1537.185470] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.189554] env[63371]: DEBUG nova.network.neutron [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Successfully updated port: d233c17c-a3d0-4e06-8087-721a7808298d {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1537.196276] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774132, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.311014] env[63371]: DEBUG oslo_vmware.api [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774128, 'name': PowerOnVM_Task, 'duration_secs': 0.838877} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.311802] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1537.311802] env[63371]: INFO nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Took 10.40 seconds to spawn the instance on the hypervisor. [ 1537.311802] env[63371]: DEBUG nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1537.312689] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7148668-9aa6-4982-a4b2-ac7785c79d4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.394030] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774131, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.612693] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.612892] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.613093] env[63371]: DEBUG nova.network.neutron [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1537.695732] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774132, 'name': ReconfigVM_Task, 'duration_secs': 0.319853} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.700226] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1/574121c4-c721-4d30-81ec-3f2310a7b6d1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1537.701179] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "refresh_cache-b880750e-7bf4-412c-bcff-eb2c343f60f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1537.701310] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "refresh_cache-b880750e-7bf4-412c-bcff-eb2c343f60f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1537.701448] env[63371]: DEBUG nova.network.neutron [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1537.703028] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3d5d6b38-25f9-498c-ac44-3e5946935cd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1537.709694] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1537.709694] env[63371]: value = "task-1774133" [ 1537.709694] env[63371]: _type = "Task" [ 1537.709694] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1537.721093] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774133, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1537.810516] env[63371]: DEBUG nova.network.neutron [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Updated VIF entry in instance network info cache for port cf8050ea-381c-487b-9981-c3f042d673e1. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1537.810899] env[63371]: DEBUG nova.network.neutron [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Updating instance_info_cache with network_info: [{"id": "cf8050ea-381c-487b-9981-c3f042d673e1", "address": "fa:16:3e:86:04:09", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf8050ea-38", "ovs_interfaceid": "cf8050ea-381c-487b-9981-c3f042d673e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1537.829373] env[63371]: INFO nova.compute.manager [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Took 46.78 seconds to build instance. [ 1537.889670] env[63371]: DEBUG oslo_vmware.api [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774131, 'name': PowerOnVM_Task, 'duration_secs': 0.624459} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1537.893653] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1537.893653] env[63371]: INFO nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Took 8.27 seconds to spawn the instance on the hypervisor. 
[ 1537.893653] env[63371]: DEBUG nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1537.893653] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d9d4a4-e904-46d2-8b7f-00d2a3e834f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.009077] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7137fa8a-b7ee-4dc2-a46e-5a52556e6221 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.016503] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-166855c3-7982-4f3d-81b5-7ddc13c7d14a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.046722] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e77b080-82e2-4f5c-8d64-64177352dfa1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.054974] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87764988-ac9a-4da6-9f95-b427106a45cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.069964] env[63371]: DEBUG nova.compute.provider_tree [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1538.221774] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774133, 'name': Rename_Task, 'duration_secs': 0.135929} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.222137] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1538.222754] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e065fd82-de84-43e6-84be-1b13ef3408eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.228708] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1538.228708] env[63371]: value = "task-1774134" [ 1538.228708] env[63371]: _type = "Task" [ 1538.228708] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.237186] env[63371]: DEBUG nova.network.neutron [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1538.243976] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774134, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1538.302210] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] Acquiring lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1538.302341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] Acquired lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1538.302503] env[63371]: DEBUG nova.network.neutron [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1538.315536] env[63371]: DEBUG oslo_concurrency.lockutils [req-c8b38454-d80a-405f-8b5e-189a5ab58c8f req-dfca9470-bd06-4976-a752-76d00a4efbc9 service nova] Releasing lock "refresh_cache-150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.333595] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f599e129-1707-4ba8-b06c-58d3fdaf5892 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "1c93487b-6d8f-424d-8b95-10bfb894c609" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.056s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.382777] env[63371]: DEBUG nova.network.neutron [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance_info_cache with network_info: [{"id": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "address": "fa:16:3e:e7:1c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a7d6d-66", "ovs_interfaceid": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.410064] env[63371]: DEBUG nova.network.neutron [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Updating instance_info_cache with network_info: [{"id": "d233c17c-a3d0-4e06-8087-721a7808298d", "address": "fa:16:3e:5b:85:bf", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd233c17c-a3", "ovs_interfaceid": "d233c17c-a3d0-4e06-8087-721a7808298d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1538.410814] env[63371]: INFO nova.compute.manager [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Took 38.18 seconds to build instance. 
[ 1538.573113] env[63371]: DEBUG nova.scheduler.client.report [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1538.738980] env[63371]: DEBUG oslo_vmware.api [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774134, 'name': PowerOnVM_Task, 'duration_secs': 0.467099} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1538.739488] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1538.739809] env[63371]: DEBUG nova.compute.manager [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1538.740717] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326ffabc-9943-44ca-a388-67000eb6beb4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.836352] env[63371]: DEBUG nova.compute.manager [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1538.886720] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.913083] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "refresh_cache-b880750e-7bf4-412c-bcff-eb2c343f60f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1538.913420] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Instance network_info: |[{"id": "d233c17c-a3d0-4e06-8087-721a7808298d", "address": "fa:16:3e:5b:85:bf", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd233c17c-a3", "ovs_interfaceid": "d233c17c-a3d0-4e06-8087-721a7808298d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1538.913866] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4fdb0d2e-470a-42b9-b2d7-f0e433e9ddd1 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.691s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1538.914184] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:85:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd233c17c-a3d0-4e06-8087-721a7808298d', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1538.921886] env[63371]: DEBUG 
oslo.service.loopingcall [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1538.924444] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1538.924693] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4db4ca4-8aea-4794-a3a5-cbb3a342f356 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1538.948390] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1538.948390] env[63371]: value = "task-1774135" [ 1538.948390] env[63371]: _type = "Task" [ 1538.948390] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1538.956197] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774135, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.074614] env[63371]: DEBUG nova.network.neutron [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Updating instance_info_cache with network_info: [{"id": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "address": "fa:16:3e:1a:60:7b", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7958f5c-d0", "ovs_interfaceid": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.078521] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.980s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.080267] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.885s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.082097] env[63371]: INFO nova.compute.claims [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1539.102187] env[63371]: INFO nova.scheduler.client.report [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Deleted allocations for instance f8119ade-7018-4ad8-82fe-baa0a6753c64 [ 1539.122157] env[63371]: DEBUG nova.compute.manager [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Received event network-vif-plugged-d233c17c-a3d0-4e06-8087-721a7808298d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1539.122157] env[63371]: DEBUG oslo_concurrency.lockutils [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] Acquiring lock "b880750e-7bf4-412c-bcff-eb2c343f60f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.122157] env[63371]: DEBUG oslo_concurrency.lockutils [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.122157] env[63371]: DEBUG oslo_concurrency.lockutils [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.122706] env[63371]: DEBUG nova.compute.manager [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] No waiting events found dispatching network-vif-plugged-d233c17c-a3d0-4e06-8087-721a7808298d {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1539.122706] env[63371]: WARNING nova.compute.manager [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Received unexpected event network-vif-plugged-d233c17c-a3d0-4e06-8087-721a7808298d for instance with vm_state building and task_state spawning. 
[ 1539.122706] env[63371]: DEBUG nova.compute.manager [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Received event network-changed-d233c17c-a3d0-4e06-8087-721a7808298d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1539.122706] env[63371]: DEBUG nova.compute.manager [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Refreshing instance network info cache due to event network-changed-d233c17c-a3d0-4e06-8087-721a7808298d. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1539.122882] env[63371]: DEBUG oslo_concurrency.lockutils [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] Acquiring lock "refresh_cache-b880750e-7bf4-412c-bcff-eb2c343f60f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.123264] env[63371]: DEBUG oslo_concurrency.lockutils [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] Acquired lock "refresh_cache-b880750e-7bf4-412c-bcff-eb2c343f60f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.125718] env[63371]: DEBUG nova.network.neutron [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Refreshing network info cache for port d233c17c-a3d0-4e06-8087-721a7808298d {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1539.257818] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.356078] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.459278] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774135, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.576986] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] Releasing lock "refresh_cache-1c93487b-6d8f-424d-8b95-10bfb894c609" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1539.577204] env[63371]: DEBUG nova.compute.manager [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Inject network info {{(pid=63371) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1539.577461] env[63371]: DEBUG nova.compute.manager [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] network_info to inject: |[{"id": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "address": "fa:16:3e:1a:60:7b", "network": {"id": "4c85519f-565e-4cd3-978c-59b671683d27", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2021428401-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a4ca8a73414142d497ebd3d3f043d9ae", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ccc0e97b-b21d-4557-a4d4-fd7e8f973368", "external-id": "nsx-vlan-transportzone-380", "segmentation_id": 380, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7958f5c-d0", "ovs_interfaceid": "f7958f5c-d0af-44e7-bbb2-e6fa265a6da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1539.582911] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Reconfiguring VM instance to set the machine id {{(pid=63371) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1539.583748] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e9037c3-7acc-43d0-b48e-71bda53ea7db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.604641] env[63371]: DEBUG oslo_vmware.api [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] Waiting for the task: (returnval){ [ 1539.604641] env[63371]: value = "task-1774136" [ 1539.604641] env[63371]: _type = "Task" [ 1539.604641] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.617960] env[63371]: DEBUG oslo_vmware.api [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] Task: {'id': task-1774136, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.618658] env[63371]: DEBUG oslo_concurrency.lockutils [None req-495c8593-f8d3-418b-bd9e-ae0bd01f8c8a tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "f8119ade-7018-4ad8-82fe-baa0a6753c64" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.601s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.737438] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.737741] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.737955] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1539.738162] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1539.738331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1539.742374] env[63371]: INFO nova.compute.manager [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Terminating instance [ 1539.746201] env[63371]: DEBUG nova.compute.manager 
[None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1539.746370] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1539.747249] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c08917-09c4-4b89-b4de-78c470a39eab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.757143] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1539.757399] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6683cc49-236f-4403-a4fe-6ac9aa2c2354 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.763573] env[63371]: DEBUG oslo_vmware.api [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1539.763573] env[63371]: value = "task-1774137" [ 1539.763573] env[63371]: _type = "Task" [ 1539.763573] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.771902] env[63371]: DEBUG oslo_vmware.api [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774137, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1539.944395] env[63371]: DEBUG nova.network.neutron [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Updated VIF entry in instance network info cache for port d233c17c-a3d0-4e06-8087-721a7808298d. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1539.944904] env[63371]: DEBUG nova.network.neutron [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Updating instance_info_cache with network_info: [{"id": "d233c17c-a3d0-4e06-8087-721a7808298d", "address": "fa:16:3e:5b:85:bf", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd233c17c-a3", "ovs_interfaceid": "d233c17c-a3d0-4e06-8087-721a7808298d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1539.964241] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774135, 'name': CreateVM_Task, 'duration_secs': 0.990578} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1539.964507] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1539.966084] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1539.966554] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1539.966917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1539.967548] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ffe9340-35fe-4746-9f18-b8c20bf88ac9 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1539.973168] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1539.973168] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522019e2-946d-d608-9ac2-86e86d560b6a" [ 1539.973168] env[63371]: _type = "Task" [ 1539.973168] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1539.981951] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522019e2-946d-d608-9ac2-86e86d560b6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.013583] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "574121c4-c721-4d30-81ec-3f2310a7b6d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.013846] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.014063] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "574121c4-c721-4d30-81ec-3f2310a7b6d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.014339] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.014410] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1540.016559] env[63371]: INFO nova.compute.manager [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 
tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Terminating instance [ 1540.018489] env[63371]: DEBUG nova.compute.manager [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1540.018561] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1540.019427] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11010db-6e37-4330-9003-c48682f7bb2b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.027117] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1540.027370] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2997a741-d0f9-4beb-8a1f-c3c4d00ea682 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.033287] env[63371]: DEBUG oslo_vmware.api [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1540.033287] env[63371]: value = "task-1774138" [ 1540.033287] env[63371]: _type = "Task" [ 1540.033287] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.042930] env[63371]: DEBUG oslo_vmware.api [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774138, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.114183] env[63371]: DEBUG oslo_vmware.api [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] Task: {'id': task-1774136, 'name': ReconfigVM_Task, 'duration_secs': 0.251158} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.114470] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2a0471c5-e975-4d7b-b46b-7b1680904c1c tempest-ServersAdminTestJSON-1440925087 tempest-ServersAdminTestJSON-1440925087-project-admin] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Reconfigured VM instance to set the machine id {{(pid=63371) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1540.275203] env[63371]: DEBUG oslo_vmware.api [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774137, 'name': PowerOffVM_Task, 'duration_secs': 0.256743} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.275528] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1540.275746] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1540.276018] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-418aa394-e8a6-42c2-89bf-070236a1d4ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.353226] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1540.353226] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1540.353226] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Deleting the datastore file [datastore1] d6bc618e-33c9-4b45-b79f-afe6811acd4e {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1540.353226] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a89f8511-c365-4d7d-a1f9-b5ea627f89cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.359169] env[63371]: DEBUG oslo_vmware.api [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for the task: (returnval){ [ 1540.359169] env[63371]: value = "task-1774140" [ 1540.359169] env[63371]: _type = 
"Task" [ 1540.359169] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.368072] env[63371]: DEBUG oslo_vmware.api [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774140, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.406103] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f23d8ab-9a86-4e34-b694-9118ece1f9c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.430067] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 0 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1540.448500] env[63371]: DEBUG oslo_concurrency.lockutils [req-1440bd4e-bd32-4e63-86f9-c26135a15725 req-5424d4c0-6f20-4829-9f42-d6b9422f92c8 service nova] Releasing lock "refresh_cache-b880750e-7bf4-412c-bcff-eb2c343f60f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.487174] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522019e2-946d-d608-9ac2-86e86d560b6a, 'name': SearchDatastore_Task, 'duration_secs': 0.010063} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.487174] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1540.487174] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1540.487174] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1540.487174] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1540.487174] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1540.487174] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a88001c8-3785-4045-bb46-ad05a9ec5406 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.495843] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1540.496036] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1540.496776] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53c089bc-6b04-434a-82e6-98d5cebc5881 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.501631] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1540.501631] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521cedc0-b273-11d3-c3ba-3e45759062cf" [ 1540.501631] env[63371]: _type = "Task" [ 1540.501631] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.512462] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521cedc0-b273-11d3-c3ba-3e45759062cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.541723] env[63371]: DEBUG oslo_vmware.api [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774138, 'name': PowerOffVM_Task, 'duration_secs': 0.18904} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.544483] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1540.544657] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1540.545071] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-81650a86-a120-4512-b759-fae844c3f024 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.566228] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f6b847-b367-4e27-8a13-53a654b028e7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.576981] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735a2dc8-d3f7-4c09-beb6-07ad32967c0a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.610114] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f71d4a2a-fcfc-4417-a9f7-19a7839b7c7f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1540.618155] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059915ec-98d3-41be-b4d8-d7074bcc70ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.624039] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1540.624253] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1540.624425] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleting the datastore file [datastore1] 574121c4-c721-4d30-81ec-3f2310a7b6d1 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1540.624659] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-06219820-e841-455b-8534-bc63ef6cde71 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.633915] env[63371]: DEBUG nova.compute.provider_tree [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1540.640435] env[63371]: DEBUG oslo_vmware.api [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1540.640435] env[63371]: value = "task-1774142" [ 1540.640435] env[63371]: _type = "Task" [ 1540.640435] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.650126] env[63371]: DEBUG oslo_vmware.api [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774142, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1540.828491] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1540.828718] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1540.870246] env[63371]: DEBUG oslo_vmware.api [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Task: {'id': task-1774140, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134468} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1540.870519] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1540.870706] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1540.870889] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1540.871231] env[63371]: INFO nova.compute.manager [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1540.871377] env[63371]: DEBUG oslo.service.loopingcall [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1540.871537] env[63371]: DEBUG nova.compute.manager [-] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1540.871655] env[63371]: DEBUG nova.network.neutron [-] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1540.938122] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1540.938762] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70e494ce-6bbc-4242-b0aa-3eced46f24f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1540.946680] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1540.946680] env[63371]: value = "task-1774143" [ 1540.946680] env[63371]: _type = "Task" [ 1540.946680] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1540.969659] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774143, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.013925] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521cedc0-b273-11d3-c3ba-3e45759062cf, 'name': SearchDatastore_Task, 'duration_secs': 0.012642} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.014787] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc0cfb16-fe47-4588-8cb0-a2d7ecace979 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.020088] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1541.020088] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52260ed7-df1f-ab96-579a-69cf8db35b11" [ 1541.020088] env[63371]: _type = "Task" [ 1541.020088] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.027985] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52260ed7-df1f-ab96-579a-69cf8db35b11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.148652] env[63371]: DEBUG oslo_vmware.api [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774142, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168228} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.148909] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1541.149158] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1541.149367] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1541.149569] env[63371]: INFO nova.compute.manager [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1541.149867] env[63371]: DEBUG oslo.service.loopingcall [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1541.150110] env[63371]: DEBUG nova.compute.manager [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1541.150287] env[63371]: DEBUG nova.network.neutron [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1541.164374] env[63371]: ERROR nova.scheduler.client.report [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [req-bb87a2c2-7514-4a8a-80b8-d3f5a414aa05] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bb87a2c2-7514-4a8a-80b8-d3f5a414aa05"}]} [ 1541.191150] env[63371]: DEBUG nova.scheduler.client.report [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1541.205387] env[63371]: DEBUG nova.scheduler.client.report [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1541.205644] env[63371]: DEBUG nova.compute.provider_tree [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1541.224590] env[63371]: DEBUG nova.scheduler.client.report [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] 
Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1541.248461] env[63371]: DEBUG nova.scheduler.client.report [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1541.330792] env[63371]: DEBUG nova.compute.manager [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1541.352216] env[63371]: DEBUG nova.compute.manager [req-39cf6508-acdb-4fad-9d49-c0e9cfb83db1 req-543a3886-7a24-4456-8914-1157c85438ee service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Received event network-vif-deleted-dc1a6185-a139-4788-bbd2-d5540dd42733 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1541.352216] env[63371]: INFO nova.compute.manager [req-39cf6508-acdb-4fad-9d49-c0e9cfb83db1 req-543a3886-7a24-4456-8914-1157c85438ee service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Neutron deleted interface dc1a6185-a139-4788-bbd2-d5540dd42733; detaching it from the instance and deleting it from the info cache [ 1541.352216] env[63371]: DEBUG nova.network.neutron [req-39cf6508-acdb-4fad-9d49-c0e9cfb83db1 req-543a3886-7a24-4456-8914-1157c85438ee service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.466361] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774143, 'name': PowerOffVM_Task, 'duration_secs': 0.274149} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.466361] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1541.466361] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 17 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1541.470721] env[63371]: INFO nova.compute.manager [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Rebuilding instance [ 1541.531242] env[63371]: DEBUG nova.compute.manager [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1541.532692] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10c0d1c-6c1a-410c-a61c-f79153adfdfc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.539191] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52260ed7-df1f-ab96-579a-69cf8db35b11, 'name': SearchDatastore_Task, 'duration_secs': 0.016247} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1541.539764] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1541.540023] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b880750e-7bf4-412c-bcff-eb2c343f60f0/b880750e-7bf4-412c-bcff-eb2c343f60f0.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1541.540570] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d454ab1-b3ea-4b41-ae19-a0e0eecf8079 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.550134] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1541.550134] env[63371]: value = "task-1774144" [ 1541.550134] env[63371]: _type = "Task" [ 1541.550134] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1541.559669] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774144, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1541.588390] env[63371]: DEBUG nova.compute.manager [req-99c533b5-2d8d-4a7c-896e-6529edd72f05 req-3b88459a-5e1d-41df-a541-39621e8ad527 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Received event network-vif-deleted-12bfc72d-5ca7-4f11-8259-77887b5af47c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1541.588508] env[63371]: INFO nova.compute.manager [req-99c533b5-2d8d-4a7c-896e-6529edd72f05 req-3b88459a-5e1d-41df-a541-39621e8ad527 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Neutron deleted interface 12bfc72d-5ca7-4f11-8259-77887b5af47c; detaching it from the instance and deleting it from the info cache [ 1541.588677] env[63371]: DEBUG nova.network.neutron [req-99c533b5-2d8d-4a7c-896e-6529edd72f05 req-3b88459a-5e1d-41df-a541-39621e8ad527 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.742081] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d686a30b-f4a6-4316-879a-2d2bb301f7d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.751368] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbf7bf3-daf1-46fa-85b5-f33f9dc680c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.786418] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2667f74-13fb-4d4d-90bf-5ad04ba6c5ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.795085] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935649d8-42af-43e3-8ee1-88b2352a9e5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.800194] env[63371]: DEBUG nova.network.neutron [-] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.813767] env[63371]: DEBUG nova.compute.provider_tree [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1541.855808] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-54030da2-0cde-4f89-ac01-2b5e4275ad10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.858486] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.867931] env[63371]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b5907c-1020-4de3-a31c-f3f5cbee69af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1541.907811] env[63371]: DEBUG nova.compute.manager [req-39cf6508-acdb-4fad-9d49-c0e9cfb83db1 req-543a3886-7a24-4456-8914-1157c85438ee service nova] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Detach interface failed, port_id=dc1a6185-a139-4788-bbd2-d5540dd42733, reason: Instance d6bc618e-33c9-4b45-b79f-afe6811acd4e could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1541.975583] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1541.975909] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1541.976138] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1541.976379] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1541.976594] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1541.976810] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1541.977088] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1541.977263] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1541.977430] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1541.977589] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1541.977755] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1541.983394] env[63371]: DEBUG nova.network.neutron [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1541.984697] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ecb91d4-05fc-494d-a181-c3927b165b61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.004398] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1542.004398] env[63371]: value = "task-1774145" [ 1542.004398] env[63371]: _type = "Task" [ 1542.004398] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.014500] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774145, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.048279] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1542.048631] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3cbb2fe5-23da-4a87-bb71-db37bc5bfe7e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.061506] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774144, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506425} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.063167] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] b880750e-7bf4-412c-bcff-eb2c343f60f0/b880750e-7bf4-412c-bcff-eb2c343f60f0.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1542.063393] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1542.063704] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1542.063704] env[63371]: value = "task-1774146" [ 1542.063704] env[63371]: _type = "Task" [ 1542.063704] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.063893] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e717e50-4672-4368-9e67-3da30d2f1944 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.076124] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774146, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.077591] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1542.077591] env[63371]: value = "task-1774147" [ 1542.077591] env[63371]: _type = "Task" [ 1542.077591] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.096880] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83fa19a1-599f-4813-8755-1e529321a0d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.107638] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31aa1bc4-4548-4721-8d92-33034c4c9093 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.146011] env[63371]: DEBUG nova.compute.manager [req-99c533b5-2d8d-4a7c-896e-6529edd72f05 req-3b88459a-5e1d-41df-a541-39621e8ad527 service nova] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Detach interface failed, port_id=12bfc72d-5ca7-4f11-8259-77887b5af47c, reason: Instance 574121c4-c721-4d30-81ec-3f2310a7b6d1 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1542.303280] env[63371]: INFO nova.compute.manager [-] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Took 1.43 seconds to deallocate network for instance. [ 1542.317667] env[63371]: DEBUG nova.scheduler.client.report [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1542.446533] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.446859] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.497042] env[63371]: INFO nova.compute.manager [-] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Took 1.35 seconds to deallocate network for instance. [ 1542.515386] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774145, 'name': ReconfigVM_Task, 'duration_secs': 0.280393} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.515711] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 33 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1542.578135] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774146, 'name': PowerOffVM_Task, 'duration_secs': 0.289395} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.578228] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1542.578434] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1542.582511] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1679b3d7-5cbe-4e72-a50f-992d75092101 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.590202] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1542.593438] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c66eae19-8852-4c3b-b0a8-18125cb0654b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.595224] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774147, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083383} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1542.595469] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1542.596561] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1e51f4-aac5-4f6f-b083-a91fa848f652 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.631301] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] b880750e-7bf4-412c-bcff-eb2c343f60f0/b880750e-7bf4-412c-bcff-eb2c343f60f0.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1542.632010] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be78eb81-afbe-4a91-99a8-78df1e2a8654 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.653044] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1542.653044] env[63371]: value = "task-1774149" [ 1542.653044] env[63371]: _type = "Task" [ 1542.653044] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.661295] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774149, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.713450] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1542.713664] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1542.713896] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleting the datastore file [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1542.714778] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c736f2b-fe86-49b2-95a4-3f078c597a12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1542.722585] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1542.722585] env[63371]: value = "task-1774150" [ 1542.722585] env[63371]: _type = "Task" [ 1542.722585] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1542.731387] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774150, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1542.810253] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.826673] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.746s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1542.827277] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1542.830127] env[63371]: DEBUG oslo_concurrency.lockutils [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.346s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.004127] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.022258] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:32:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bb3da7ed-b700-420c-a825-23c0d1a3f881',id=26,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2130760861',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1543.022589] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1543.022809] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1543.023063] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1543.023293] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1543.023503] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1543.023764] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a 
tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1543.024113] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1543.024269] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1543.024495] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1543.024725] env[63371]: DEBUG nova.virt.hardware [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1543.030204] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfiguring VM instance instance-00000031 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1543.030783] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b14b07d-2116-4c31-8553-80e45bda4fb1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.054771] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1543.054771] env[63371]: value = "task-1774151" [ 1543.054771] env[63371]: _type = "Task" [ 1543.054771] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.066448] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774151, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.166297] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774149, 'name': ReconfigVM_Task, 'duration_secs': 0.319374} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.166593] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Reconfigured VM instance instance-0000003a to attach disk [datastore1] b880750e-7bf4-412c-bcff-eb2c343f60f0/b880750e-7bf4-412c-bcff-eb2c343f60f0.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1543.167259] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f7fd7c98-b97f-4b05-97c8-d54780797350 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.178105] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1543.178105] env[63371]: value = "task-1774152" [ 1543.178105] env[63371]: _type = "Task" [ 1543.178105] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1543.191767] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774152, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.934177] env[63371]: DEBUG nova.compute.utils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1543.936089] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774150, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158891} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1543.938939] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1543.939093] env[63371]: DEBUG nova.network.neutron [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1543.946467] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1543.946665] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1543.946848] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1543.959659] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774152, 'name': Rename_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.960209] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774151, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1543.999158] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1cd0e1-b300-4308-a781-9ff3df944977 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.003387] env[63371]: DEBUG nova.policy [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1b2f698ebd747d6a84ac3f3e05e97b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a5b81b233f640b186d9798ff57a4945', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1544.009883] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754543e6-50fd-4b4d-94db-3469bb2b7c87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.042273] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd394b80-2a7a-4cea-9ee3-996ce64a70d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.050718] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca9c8c90-b3cc-4bbb-be63-ae53bb3a8a52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.065152] env[63371]: DEBUG nova.compute.provider_tree [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1544.289226] env[63371]: DEBUG nova.network.neutron [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Successfully created port: bc802b6c-1a40-491b-8222-aa71e5d0bcd3 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1544.444876] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1544.458220] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774152, 'name': Rename_Task, 'duration_secs': 0.98931} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.459344] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1544.459619] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774151, 'name': ReconfigVM_Task, 'duration_secs': 1.356435} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1544.460023] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1e1d70b-6055-4a53-ab75-c00cfc786b5d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.461695] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfigured VM instance instance-00000031 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1544.463152] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf11542-7a19-42bc-8bd2-a1e7734c694a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.487223] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1544.488727] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ab55d61-29c3-488e-9b57-cc0339062dd5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.501519] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1544.501519] env[63371]: value = "task-1774153" [ 1544.501519] env[63371]: _type = "Task" [ 1544.501519] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.510188] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1544.510188] env[63371]: value = "task-1774154" [ 1544.510188] env[63371]: _type = "Task" [ 1544.510188] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.524667] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774154, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1544.567979] env[63371]: DEBUG nova.scheduler.client.report [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1544.871229] env[63371]: INFO nova.compute.manager [None req-5e53a0f1-e096-4b70-87eb-4dc153c65ad4 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Get console output [ 1544.871616] env[63371]: WARNING nova.virt.vmwareapi.driver [None req-5e53a0f1-e096-4b70-87eb-4dc153c65ad4 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] The console log is missing. Check your VSPC configuration [ 1544.989487] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1544.989773] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1544.989877] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1544.990063] env[63371]: DEBUG nova.virt.hardware [None 
req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1544.990231] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1544.990378] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1544.990578] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1544.990728] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1544.990883] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1544.991104] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1544.991287] env[63371]: DEBUG nova.virt.hardware [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1544.992403] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8a9873-8ad4-48c0-b63c-2c0bff742447 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.001275] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de587a2-e122-4848-8a6c-ff7757421d2c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.024157] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:68:50', 'network_ref': 
{'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e2249de3-2c03-4371-aab4-6173dd2b5d56', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1545.031691] env[63371]: DEBUG oslo.service.loopingcall [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1545.031950] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774153, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.032503] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1545.033136] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-479b7dd2-f67d-4787-bcfa-0a632d8e0b63 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.050264] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774154, 'name': ReconfigVM_Task, 'duration_secs': 0.30551} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.050875] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1545.051193] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 50 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1545.056513] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1545.056513] env[63371]: value = "task-1774155" [ 1545.056513] env[63371]: _type = "Task" [ 1545.056513] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.064889] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774155, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.074180] env[63371]: DEBUG oslo_concurrency.lockutils [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.244s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.074720] env[63371]: INFO nova.compute.manager [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Successfully reverted task state from image_uploading on failure for instance. [ 1545.076710] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.157s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.076926] env[63371]: DEBUG nova.objects.instance [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lazy-loading 'resources' on Instance uuid 594ff846-8e3e-4882-8ddc-41f824a77a5c {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server [None req-23f919f3-c4d6-4dc1-9f1a-e565bdabbc4a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Exception during message handling: oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-368326' has already been deleted or has not been completely created [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1545.079194] env[63371]: 
ERROR oslo_messaging.rpc.server raise self.value [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server raise self.value [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server raise self.value [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 233, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server raise self.value [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 230, in decorated_function [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return function(self, context, image_id, instance, [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 
4443, in snapshot_instance [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self._snapshot_instance(context, image_id, instance, [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4476, in _snapshot_instance [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self.driver.snapshot(context, instance, image_id, [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 571, in snapshot [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self._vmops.snapshot(context, instance, image_id, update_task_state) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1033, in snapshot [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server self._delete_vm_snapshot(instance, vm_ref, snapshot_ref) [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/decorator.py", line 232, in fun [ 1545.079194] env[63371]: ERROR oslo_messaging.rpc.server return caller(func, *(extras + args), **kw) [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 124, in retry_if_task_in_progress [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server f(*args, **kwargs) [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 937, in _delete_vm_snapshot [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server self._session._wait_for_task(delete_snapshot_task) [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server return self.wait_for_task(task_ref) [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server self.f(*self.args, **self.kw) [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server raise exceptions.translate_fault(task_info.error) [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-368326' has already been deleted or has not been completely created [ 1545.080638] env[63371]: ERROR oslo_messaging.rpc.server [ 1545.453970] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 
tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1545.474553] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1545.474804] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1545.474955] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1545.475180] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1545.475334] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1545.475515] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1545.475721] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1545.475891] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1545.476098] env[63371]: 
DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1545.476276] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1545.476466] env[63371]: DEBUG nova.virt.hardware [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1545.477428] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f0e735-4e00-4fa8-96c1-09f295d2d5a1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.489798] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f706d366-9eed-4818-a9e9-c830fd4db0bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.514745] env[63371]: DEBUG oslo_vmware.api [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774153, 'name': PowerOnVM_Task, 'duration_secs': 0.749811} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.515016] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1545.515216] env[63371]: INFO nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Took 8.94 seconds to spawn the instance on the hypervisor. 
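The nova.virt.hardware entries above show the driver deriving a guest CPU topology for the m1.nano flavor: with no flavor or image limits set, the default maxima of 65536 sockets/cores/threads apply, and for a single vCPU the only valid topology is 1 socket x 1 core x 1 thread. A minimal, standalone sketch of that enumeration (not Nova's actual implementation; preference ordering is omitted and names are illustrative):

    from collections import namedtuple

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Enumerate every (sockets, cores, threads) factorisation of the vCPU count
        # within the per-dimension maxima, mirroring the "Build topologies for N
        # vcpu(s)" / "Got N possible topologies" log lines.
        found = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        found.append(VirtCPUTopology(s, c, t))
        return found

    print(possible_topologies(1))
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)], matching the log above.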
[ 1545.515414] env[63371]: DEBUG nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1545.516148] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8eeffc0-a7b7-4b34-9951-f4e5e8a86086 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.563740] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e856b3bd-7b66-4c98-ada3-985b66266385 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.573099] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774155, 'name': CreateVM_Task, 'duration_secs': 0.360208} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.586750] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1545.590391] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1545.590755] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1545.590872] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1545.591715] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89480b4b-4934-4aef-afe7-c85346896963 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.594340] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a950931b-ca9a-4e06-ac9d-df1c4c1480ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.614758] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 67 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1545.621584] 
env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1545.621584] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a75a4e-c571-78c9-19d1-4bc080ce5348" [ 1545.621584] env[63371]: _type = "Task" [ 1545.621584] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.632783] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a75a4e-c571-78c9-19d1-4bc080ce5348, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.829933] env[63371]: DEBUG nova.compute.manager [req-0736a095-4800-4bda-b0d4-ee5cc0485a7b req-d1235788-f926-444b-a42a-7571385bae18 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Received event network-vif-plugged-bc802b6c-1a40-491b-8222-aa71e5d0bcd3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1545.830252] env[63371]: DEBUG oslo_concurrency.lockutils [req-0736a095-4800-4bda-b0d4-ee5cc0485a7b req-d1235788-f926-444b-a42a-7571385bae18 service nova] Acquiring lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.830461] env[63371]: DEBUG oslo_concurrency.lockutils [req-0736a095-4800-4bda-b0d4-ee5cc0485a7b req-d1235788-f926-444b-a42a-7571385bae18 service nova] Lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.831345] env[63371]: DEBUG oslo_concurrency.lockutils [req-0736a095-4800-4bda-b0d4-ee5cc0485a7b req-d1235788-f926-444b-a42a-7571385bae18 service nova] Lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.831345] env[63371]: DEBUG nova.compute.manager [req-0736a095-4800-4bda-b0d4-ee5cc0485a7b req-d1235788-f926-444b-a42a-7571385bae18 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] No waiting events found dispatching network-vif-plugged-bc802b6c-1a40-491b-8222-aa71e5d0bcd3 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1545.831345] env[63371]: WARNING nova.compute.manager [req-0736a095-4800-4bda-b0d4-ee5cc0485a7b req-d1235788-f926-444b-a42a-7571385bae18 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Received unexpected event network-vif-plugged-bc802b6c-1a40-491b-8222-aa71e5d0bcd3 for instance with vm_state building and task_state spawning. 
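The req-0736a095 entries show a Neutron network-vif-plugged event arriving while the instance is still building: the compute manager takes the per-instance "-events" lock, looks for a registered waiter for that event, finds none ("No waiting events found"), and logs the "Received unexpected event" warning. A rough sketch of that waiter/dispatch pattern, with threading.Event standing in for Nova's eventlet-based machinery and all names illustrative:

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Track externally triggered events (e.g. network-vif-plugged-<port>) per instance."""

        def __init__(self):
            self._lock = threading.Lock()      # plays the role of the "<uuid>-events" lock
            self._waiters = defaultdict(dict)  # instance_uuid -> {event_name: Event}

        def prepare(self, instance_uuid, event_name):
            # Registered before the operation that will trigger the event (e.g. plugging a VIF).
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def pop_instance_event(self, instance_uuid, event_name):
            # Called when the external event arrives from Neutron.
            with self._lock:
                return self._waiters[instance_uuid].pop(event_name, None)

    def external_instance_event(events, instance_uuid, event_name):
        waiter = events.pop_instance_event(instance_uuid, event_name)
        if waiter is None:
            # Corresponds to the "Received unexpected event ... vm_state building" warning.
            print("WARNING: unexpected event %s for %s" % (event_name, instance_uuid))
        else:
            waiter.set()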
[ 1545.918915] env[63371]: DEBUG nova.network.neutron [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Successfully updated port: bc802b6c-1a40-491b-8222-aa71e5d0bcd3 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1545.988799] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "47c1c242-d190-4523-8033-307c5a9b7535" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.989061] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "47c1c242-d190-4523-8033-307c5a9b7535" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.989273] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "47c1c242-d190-4523-8033-307c5a9b7535-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.989455] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "47c1c242-d190-4523-8033-307c5a9b7535-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1545.989629] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "47c1c242-d190-4523-8033-307c5a9b7535-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1545.992120] env[63371]: INFO nova.compute.manager [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Terminating instance [ 1545.995949] env[63371]: DEBUG nova.compute.manager [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1545.996084] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1545.996903] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b74d3b-21f7-42dc-8452-3bd986a37e1e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.007307] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1546.007550] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78e27e44-3e04-4af7-ab09-02c4d0f95ead {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.014161] env[63371]: DEBUG oslo_vmware.api [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1546.014161] env[63371]: value = "task-1774156" [ 1546.014161] env[63371]: _type = "Task" [ 1546.014161] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.030762] env[63371]: DEBUG oslo_vmware.api [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774156, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.034474] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d726d2-4b09-4e3f-904e-6a59dd6c8522 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.037271] env[63371]: INFO nova.compute.manager [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Took 43.31 seconds to build instance. 
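Most of the oslo_vmware.api entries in this section follow the same pattern: a vCenter task (PowerOffVM_Task, Rename_Task, CreateVM_Task, CopyVirtualDisk_Task, and so on) is started, then polled until it completes, with the intermediate "progress is N%" lines and the final "completed successfully" line including duration_secs. A simplified polling loop in that spirit, assuming a hypothetical get_task_info() helper rather than the real oslo.vmware session API:

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, task_id, poll_interval=0.5):
        """Poll a vCenter-style task until it finishes.

        get_task_info(task_id) is assumed to return an object with
        .state in {"queued", "running", "success", "error"},
        .progress (int) and .error (exception or None).
        """
        start = time.monotonic()
        while True:
            info = get_task_info(task_id)
            if info.state == "success":
                duration = time.monotonic() - start
                # Mirrors "Task: {...} completed successfully" with duration_secs.
                print("Task %s completed successfully in %.3fs" % (task_id, duration))
                return info
            if info.state == "error":
                # The real code translates the fault, e.g. into
                # ManagedObjectNotFoundException as seen in the traceback above.
                raise TaskFailed(info.error)
            print("Task %s progress is %s%%" % (task_id, info.progress))
            time.sleep(poll_interval)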
[ 1546.043156] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6db18365-3c6e-4208-b45f-4139d61a9056 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.078615] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b49aa9a-3a36-4973-ac91-d78194ea0df3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.088021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eebe311-5228-4d05-bc23-de002059bec7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.104479] env[63371]: DEBUG nova.compute.provider_tree [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1546.136012] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a75a4e-c571-78c9-19d1-4bc080ce5348, 'name': SearchDatastore_Task, 'duration_secs': 0.035061} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.136351] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.136574] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1546.136810] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.136946] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.137135] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1546.137400] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d1fe410-56a5-4e34-9252-ec1db441e451 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.148499] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1546.148684] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1546.149580] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3195bfd2-3010-42c9-aa9a-376fc9b295c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.157349] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1546.157349] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528c5ca0-d65f-8e2c-fa46-3c5eacf23f4b" [ 1546.157349] env[63371]: _type = "Task" [ 1546.157349] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.168620] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528c5ca0-d65f-8e2c-fa46-3c5eacf23f4b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.169902] env[63371]: DEBUG nova.network.neutron [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Port e13a7d6d-6643-4b64-a4b1-2a59397c5307 binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1546.425362] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "refresh_cache-704978f9-3b24-4a73-8f64-b8e3e9e94a04" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1546.425531] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "refresh_cache-704978f9-3b24-4a73-8f64-b8e3e9e94a04" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1546.425673] env[63371]: DEBUG nova.network.neutron [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1546.525958] env[63371]: DEBUG oslo_vmware.api [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774156, 'name': PowerOffVM_Task, 'duration_secs': 0.185224} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.526308] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1546.526480] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1546.526738] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51dfb172-b8a6-4024-bce5-aff8f10287d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.539177] env[63371]: DEBUG oslo_concurrency.lockutils [None req-db3a5c14-352b-492d-8749-fae3276cd430 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.819s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.607929] env[63371]: DEBUG nova.scheduler.client.report [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1546.640908] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1546.641274] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1546.641464] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Deleting the datastore file [datastore1] 47c1c242-d190-4523-8033-307c5a9b7535 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1546.641747] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-6451dea1-ba95-42fb-a173-25ee843d0d37 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.648991] env[63371]: DEBUG oslo_vmware.api [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1546.648991] env[63371]: value = "task-1774158" [ 1546.648991] env[63371]: _type = "Task" [ 1546.648991] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.658067] env[63371]: DEBUG oslo_vmware.api [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774158, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.667051] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528c5ca0-d65f-8e2c-fa46-3c5eacf23f4b, 'name': SearchDatastore_Task, 'duration_secs': 0.01452} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.667857] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6bacf0d-25a8-4dea-9e1c-afc2c0d4cf9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.677819] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1546.677819] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]527f2122-c754-7c27-d16f-6827bafd6c66" [ 1546.677819] env[63371]: _type = "Task" [ 1546.677819] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.688210] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527f2122-c754-7c27-d16f-6827bafd6c66, 'name': SearchDatastore_Task, 'duration_secs': 0.010363} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1546.688505] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1546.688772] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1546.689060] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a774f536-6fa4-470e-bf70-597bd563d880 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.696537] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1546.696537] env[63371]: value = "task-1774159" [ 1546.696537] env[63371]: _type = "Task" [ 1546.696537] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1546.706456] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774159, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1546.966948] env[63371]: DEBUG nova.network.neutron [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1547.043467] env[63371]: DEBUG nova.compute.manager [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1547.113608] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.116660] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.434s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.116955] env[63371]: DEBUG nova.objects.instance [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lazy-loading 'resources' on Instance uuid 76c861a7-30f2-40f4-b723-7912975f36f8 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1547.140292] env[63371]: DEBUG nova.network.neutron [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Updating instance_info_cache with network_info: [{"id": "bc802b6c-1a40-491b-8222-aa71e5d0bcd3", "address": "fa:16:3e:36:9a:44", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc802b6c-1a", "ovs_interfaceid": "bc802b6c-1a40-491b-8222-aa71e5d0bcd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1547.145736] env[63371]: INFO nova.scheduler.client.report [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleted allocations for instance 594ff846-8e3e-4882-8ddc-41f824a77a5c [ 1547.163376] env[63371]: DEBUG oslo_vmware.api [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774158, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143051} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.163741] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1547.163986] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1547.164243] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1547.164472] env[63371]: INFO nova.compute.manager [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1547.164789] env[63371]: DEBUG oslo.service.loopingcall [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1547.165063] env[63371]: DEBUG nova.compute.manager [-] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1547.165212] env[63371]: DEBUG nova.network.neutron [-] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1547.198858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.199255] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.199822] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.212132] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774159, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490394} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.212403] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1547.212617] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1547.213278] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-31a724a2-9538-4b87-a215-92d43c6bc505 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.222080] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1547.222080] env[63371]: value = "task-1774160" [ 1547.222080] env[63371]: _type = "Task" [ 1547.222080] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.232733] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774160, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.567852] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.646335] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "refresh_cache-704978f9-3b24-4a73-8f64-b8e3e9e94a04" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1547.646335] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Instance network_info: |[{"id": "bc802b6c-1a40-491b-8222-aa71e5d0bcd3", "address": "fa:16:3e:36:9a:44", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc802b6c-1a", "ovs_interfaceid": "bc802b6c-1a40-491b-8222-aa71e5d0bcd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1547.649995] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:9a:44', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc802b6c-1a40-491b-8222-aa71e5d0bcd3', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1547.658581] env[63371]: DEBUG oslo.service.loopingcall [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1547.660537] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1547.661082] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90a073e9-5c76-482a-93d7-81de4c6bb590 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "594ff846-8e3e-4882-8ddc-41f824a77a5c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.264s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1547.661904] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecbd4afa-7bb8-49f5-ae25-d258e5326014 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.696062] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1547.696062] env[63371]: value = "task-1774161" [ 1547.696062] env[63371]: _type = "Task" [ 1547.696062] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.708118] env[63371]: DEBUG oslo_concurrency.lockutils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1547.708387] env[63371]: DEBUG oslo_concurrency.lockutils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1547.714833] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774161, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.732310] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774160, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073292} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.735309] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1547.736833] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1547ecb-9e01-45fc-b82f-ae9dcd04fffb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.759991] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1547.763229] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41442ded-6fe7-43e9-87e2-9b84758e451d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.786339] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1547.786339] env[63371]: value = "task-1774162" [ 1547.786339] env[63371]: _type = "Task" [ 1547.786339] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.802343] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774162, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.107197] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a6eca4-d7a5-4edc-a0c6-86e401626099 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.112447] env[63371]: DEBUG nova.compute.manager [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Received event network-changed-bc802b6c-1a40-491b-8222-aa71e5d0bcd3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1548.112627] env[63371]: DEBUG nova.compute.manager [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Refreshing instance network info cache due to event network-changed-bc802b6c-1a40-491b-8222-aa71e5d0bcd3. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1548.112836] env[63371]: DEBUG oslo_concurrency.lockutils [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] Acquiring lock "refresh_cache-704978f9-3b24-4a73-8f64-b8e3e9e94a04" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.112975] env[63371]: DEBUG oslo_concurrency.lockutils [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] Acquired lock "refresh_cache-704978f9-3b24-4a73-8f64-b8e3e9e94a04" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.113152] env[63371]: DEBUG nova.network.neutron [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Refreshing network info cache for port bc802b6c-1a40-491b-8222-aa71e5d0bcd3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1548.119965] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb56610-eae2-4f28-9da1-9fcbf5e744b6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.151877] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33cf6419-1b28-421f-82ec-29638d31c96b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.160010] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1152afc3-6fe9-4849-9cbd-48892bf9cf17 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.175797] env[63371]: DEBUG nova.compute.provider_tree [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1548.189748] env[63371]: DEBUG nova.network.neutron [-] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.210676] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774161, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.216377] env[63371]: DEBUG nova.compute.utils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1548.271857] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.271857] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.271857] env[63371]: DEBUG nova.network.neutron [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1548.299315] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774162, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.679069] env[63371]: DEBUG nova.scheduler.client.report [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1548.692795] env[63371]: INFO nova.compute.manager [-] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Took 1.53 seconds to deallocate network for instance. [ 1548.712139] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774161, 'name': CreateVM_Task, 'duration_secs': 0.967257} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.712586] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1548.713353] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1548.713521] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1548.713786] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1548.714064] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-771fd3c5-695c-43e8-9a56-c9eedcb36fcf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.719732] env[63371]: DEBUG oslo_concurrency.lockutils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1548.720910] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1548.720910] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f821d4-3cf5-3a0e-171d-ad841e8d0f50" [ 1548.720910] env[63371]: _type = "Task" [ 1548.720910] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.732582] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f821d4-3cf5-3a0e-171d-ad841e8d0f50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.797473] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774162, 'name': ReconfigVM_Task, 'duration_secs': 0.972661} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1548.797700] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Reconfigured VM instance instance-00000013 to attach disk [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1548.798358] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83c26eb6-ecc5-45d3-aa59-57033fce462f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.805821] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1548.805821] env[63371]: value = "task-1774163" [ 1548.805821] env[63371]: _type = "Task" [ 1548.805821] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1548.817436] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774163, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1548.903098] env[63371]: DEBUG nova.network.neutron [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Updated VIF entry in instance network info cache for port bc802b6c-1a40-491b-8222-aa71e5d0bcd3. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1548.903602] env[63371]: DEBUG nova.network.neutron [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Updating instance_info_cache with network_info: [{"id": "bc802b6c-1a40-491b-8222-aa71e5d0bcd3", "address": "fa:16:3e:36:9a:44", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc802b6c-1a", "ovs_interfaceid": "bc802b6c-1a40-491b-8222-aa71e5d0bcd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1548.987413] env[63371]: DEBUG nova.network.neutron [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance_info_cache with network_info: [{"id": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "address": "fa:16:3e:e7:1c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a7d6d-66", "ovs_interfaceid": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.184455] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.068s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.186712] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.485s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.188220] env[63371]: INFO nova.compute.claims [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1549.205298] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.209221] env[63371]: INFO nova.scheduler.client.report [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Deleted allocations for instance 76c861a7-30f2-40f4-b723-7912975f36f8 [ 1549.232417] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f821d4-3cf5-3a0e-171d-ad841e8d0f50, 'name': SearchDatastore_Task, 'duration_secs': 0.015429} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.233039] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.233137] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1549.233459] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.233655] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.233853] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1549.234886] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac4b50c2-e3c7-4570-a7eb-59e30ce3194c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.245843] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1549.246064] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1549.246818] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8acf8e9b-38c1-4918-8807-1a14b57506c7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.253763] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1549.253763] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523b1f5d-660d-0385-4923-e87b93946ad2" [ 1549.253763] env[63371]: _type = "Task" [ 1549.253763] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.262747] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523b1f5d-660d-0385-4923-e87b93946ad2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.321213] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774163, 'name': Rename_Task, 'duration_secs': 0.462631} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.321213] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1549.321213] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de83065a-3e37-455f-a66d-f2851d1341ee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.328019] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1549.328019] env[63371]: value = "task-1774164" [ 1549.328019] env[63371]: _type = "Task" [ 1549.328019] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.335222] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774164, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.406772] env[63371]: DEBUG oslo_concurrency.lockutils [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] Releasing lock "refresh_cache-704978f9-3b24-4a73-8f64-b8e3e9e94a04" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.406772] env[63371]: DEBUG nova.compute.manager [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Received event network-vif-deleted-a2807b8c-5895-474a-9c75-58bd21982409 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1549.406772] env[63371]: INFO nova.compute.manager [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Neutron deleted interface a2807b8c-5895-474a-9c75-58bd21982409; detaching it from the instance and deleting it from the info cache [ 1549.406772] env[63371]: DEBUG nova.network.neutron [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1549.490027] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1549.717297] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6b673eaa-98d1-4a00-bc06-aafaf6e9f718 tempest-ListImageFiltersTestJSON-1665464411 tempest-ListImageFiltersTestJSON-1665464411-project-member] Lock "76c861a7-30f2-40f4-b723-7912975f36f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.892s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.765585] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523b1f5d-660d-0385-4923-e87b93946ad2, 'name': SearchDatastore_Task, 'duration_secs': 0.012472} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1549.766418] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5fae500-6d29-44ba-9c91-fb93046dc16c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.772498] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1549.772498] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5237fd57-b607-b9dc-2a51-33ee49548381" [ 1549.772498] env[63371]: _type = "Task" [ 1549.772498] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1549.781044] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5237fd57-b607-b9dc-2a51-33ee49548381, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.784739] env[63371]: DEBUG oslo_concurrency.lockutils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1549.784973] env[63371]: DEBUG oslo_concurrency.lockutils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.785215] env[63371]: INFO nova.compute.manager [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Attaching volume 31062dc6-9857-475c-b6b3-4e33c4ca4a59 to /dev/sdb [ 1549.816661] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2774405a-22f4-4922-b6f1-36a6db059490 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.825024] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0480e6c3-547d-4b05-9d66-71e649c4996a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.838207] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774164, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1549.845032] env[63371]: DEBUG nova.virt.block_device [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Updating existing volume attachment record: 8b4518a1-5a2f-4731-8e95-cba3f5d3743c {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1549.909680] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a57dcaf0-671a-4e1d-b968-cf6891bfea46 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.919669] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26e2523-645d-4c8d-afeb-dcc24e50aee0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.953515] env[63371]: DEBUG nova.compute.manager [req-cf9e7d7f-a9e3-497c-b22a-a4c3e94f3a2b req-b426adb4-e989-4b8e-ba3a-f563d1c8f408 service nova] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Detach interface failed, port_id=a2807b8c-5895-474a-9c75-58bd21982409, reason: Instance 47c1c242-d190-4523-8033-307c5a9b7535 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1550.016423] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad3e544-6fcb-4fa8-8e9b-5df5a323e01a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.036705] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d458beab-3910-46ec-a03d-a34d1ad5d425 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.043532] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 83 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1550.283396] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5237fd57-b607-b9dc-2a51-33ee49548381, 'name': SearchDatastore_Task, 'duration_secs': 0.011795} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.287213] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1550.287706] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 704978f9-3b24-4a73-8f64-b8e3e9e94a04/704978f9-3b24-4a73-8f64-b8e3e9e94a04.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1550.288343] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0367c34-5dbd-468c-b123-a65394cce285 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.300152] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1550.300152] env[63371]: value = "task-1774168" [ 1550.300152] env[63371]: _type = "Task" [ 1550.300152] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.311557] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774168, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.341917] env[63371]: DEBUG oslo_vmware.api [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774164, 'name': PowerOnVM_Task, 'duration_secs': 0.553615} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.342217] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1550.342428] env[63371]: DEBUG nova.compute.manager [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1550.343255] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a783730e-92ae-4f7f-ad49-a03c1ba0c164 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.551154] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1550.551856] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5fe72bc8-acf2-4b86-86ae-f9a85127bd24 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.562667] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1550.562667] env[63371]: value = "task-1774169" [ 1550.562667] env[63371]: _type = "Task" [ 1550.562667] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.576170] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774169, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.664973] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc4d22e-a6cf-459e-96c5-f3c3ae3d206e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.675402] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d794ee9f-6f8a-494b-a37b-f938bc176ad8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.714623] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801acafc-bbe9-4596-a4dd-b8b3caee39be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.725019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629c915e-afe4-4382-86e6-8d53c028e3f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.746571] env[63371]: DEBUG nova.compute.provider_tree [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1550.810772] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774168, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498505} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1550.811093] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 704978f9-3b24-4a73-8f64-b8e3e9e94a04/704978f9-3b24-4a73-8f64-b8e3e9e94a04.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1550.811327] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1550.811668] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-181283ed-909e-4d40-a6bc-1ef927bc1f0e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.820712] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1550.820712] env[63371]: value = "task-1774170" [ 1550.820712] env[63371]: _type = "Task" [ 1550.820712] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1550.831189] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774170, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1550.863940] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.076693] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774169, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.250333] env[63371]: DEBUG nova.scheduler.client.report [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1551.266016] env[63371]: INFO nova.compute.manager [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Rebuilding instance [ 1551.325510] env[63371]: DEBUG nova.compute.manager [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1551.326292] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62d5e12-c85f-44c8-aaf5-39a9f6cb8a49 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.338188] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774170, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071096} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.338624] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1551.339421] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d53dd4-07b5-4022-a599-8f32b096e3e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.365287] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 704978f9-3b24-4a73-8f64-b8e3e9e94a04/704978f9-3b24-4a73-8f64-b8e3e9e94a04.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1551.365931] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-326d9f29-20da-4135-9859-1271684f356f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.392515] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1551.392515] env[63371]: value = "task-1774171" [ 1551.392515] env[63371]: _type = "Task" [ 1551.392515] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.405360] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774171, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.577844] env[63371]: DEBUG oslo_vmware.api [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774169, 'name': PowerOnVM_Task, 'duration_secs': 0.546951} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1551.578146] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1551.578334] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4eea7e91-1b6c-4310-9438-c42ec8f63f4a tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance '96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f' progress to 100 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1551.756624] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.757226] env[63371]: DEBUG nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1551.761658] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.789s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.764783] env[63371]: INFO nova.compute.claims [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1551.840354] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1551.840686] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d86f54d-4fc6-4f7d-aa71-d8eb709dd057 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.848442] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1551.848442] env[63371]: value = "task-1774172" [ 1551.848442] env[63371]: _type = "Task" [ 1551.848442] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.858967] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774172, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.903337] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774171, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.272888] env[63371]: DEBUG nova.compute.utils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1552.275288] env[63371]: DEBUG nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1552.275640] env[63371]: DEBUG nova.network.neutron [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1552.363972] env[63371]: DEBUG nova.policy [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '205b9986577149cca5f5102f89f7283f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8973623e406e4ab699162499116ac8d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1552.365492] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774172, 'name': PowerOffVM_Task, 'duration_secs': 0.428093} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.365813] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1552.366123] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1552.367876] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125eafc3-6c4c-4e5a-919d-9e6d3d9d851e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.378384] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1552.379025] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b84743ce-f6fc-4365-8d58-5e5111ee9740 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.403325] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774171, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.684166] env[63371]: DEBUG nova.network.neutron [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Successfully created port: dbbac158-9444-441f-b15b-2a793507b64f {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1552.780874] env[63371]: DEBUG nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1552.904841] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774171, 'name': ReconfigVM_Task, 'duration_secs': 1.096264} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.915825] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 704978f9-3b24-4a73-8f64-b8e3e9e94a04/704978f9-3b24-4a73-8f64-b8e3e9e94a04.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1552.915825] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c2a025b8-a466-47f2-9540-e7997e53c45b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.925991] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1552.925991] env[63371]: value = "task-1774175" [ 1552.925991] env[63371]: _type = "Task" [ 1552.925991] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.931547] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1552.931547] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1552.931547] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleting the datastore file [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1552.937024] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-14c2022e-65e1-4ad0-86c4-63e9736a726f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.943252] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774175, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.945260] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1552.945260] env[63371]: value = "task-1774176" [ 1552.945260] env[63371]: _type = "Task" [ 1552.945260] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.961584] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774176, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.236131] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869bc505-bdd6-45e2-b76e-5a60f4a8dab5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.244557] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f71ed35-9492-4559-a382-56468598e35e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.275679] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0798b27-ef25-46ff-ac26-a52b662ae177 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.284021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67aaf2d2-41d3-4a28-b4e3-c97d08518d8a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.301418] env[63371]: DEBUG nova.compute.provider_tree [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1553.441567] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774175, 'name': Rename_Task, 'duration_secs': 0.243259} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.441888] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1553.442217] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b5d2aeb6-d01d-4d22-9589-2a82532f3bd2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.449149] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1553.449149] env[63371]: value = "task-1774177" [ 1553.449149] env[63371]: _type = "Task" [ 1553.449149] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.466354] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774176, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.214} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.468322] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774177, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.468587] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1553.468800] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1553.469037] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1553.601793] env[63371]: DEBUG nova.network.neutron [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Port e13a7d6d-6643-4b64-a4b1-2a59397c5307 binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1553.602208] env[63371]: DEBUG oslo_concurrency.lockutils [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.602483] env[63371]: DEBUG oslo_concurrency.lockutils [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.602483] env[63371]: DEBUG nova.network.neutron [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1553.792843] env[63371]: DEBUG nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea 
tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1553.805154] env[63371]: DEBUG nova.scheduler.client.report [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1553.821147] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1553.821416] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1553.821570] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1553.822110] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1553.822340] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1553.822501] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 
tempest-InstanceActionsV221TestJSON-2096132979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1553.822710] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1553.822867] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1553.823044] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1553.823216] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1553.823386] env[63371]: DEBUG nova.virt.hardware [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1553.824283] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf98c642-640a-4358-9009-3093e0d3dcae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.833497] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55c8e71-a57a-4888-9b46-d931aa9a24dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.967175] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774177, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.287262] env[63371]: DEBUG nova.compute.manager [req-8f2aab51-5042-4b12-95ad-bee532d8e1af req-ad5fe783-1d84-4bfd-8d72-f577c60c45d9 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Received event network-vif-plugged-dbbac158-9444-441f-b15b-2a793507b64f {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1554.287609] env[63371]: DEBUG oslo_concurrency.lockutils [req-8f2aab51-5042-4b12-95ad-bee532d8e1af req-ad5fe783-1d84-4bfd-8d72-f577c60c45d9 service nova] Acquiring lock "e05c7187-b4d6-481e-8bce-deb557dde6a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1554.287935] env[63371]: DEBUG oslo_concurrency.lockutils [req-8f2aab51-5042-4b12-95ad-bee532d8e1af req-ad5fe783-1d84-4bfd-8d72-f577c60c45d9 service nova] Lock "e05c7187-b4d6-481e-8bce-deb557dde6a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.288238] env[63371]: DEBUG oslo_concurrency.lockutils [req-8f2aab51-5042-4b12-95ad-bee532d8e1af req-ad5fe783-1d84-4bfd-8d72-f577c60c45d9 service nova] Lock "e05c7187-b4d6-481e-8bce-deb557dde6a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.288537] env[63371]: DEBUG nova.compute.manager [req-8f2aab51-5042-4b12-95ad-bee532d8e1af req-ad5fe783-1d84-4bfd-8d72-f577c60c45d9 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] No waiting events found dispatching network-vif-plugged-dbbac158-9444-441f-b15b-2a793507b64f {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1554.288833] env[63371]: WARNING nova.compute.manager [req-8f2aab51-5042-4b12-95ad-bee532d8e1af req-ad5fe783-1d84-4bfd-8d72-f577c60c45d9 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Received unexpected event network-vif-plugged-dbbac158-9444-441f-b15b-2a793507b64f for instance with vm_state building and task_state spawning. [ 1554.310136] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1554.310858] env[63371]: DEBUG nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1554.315718] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.302s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1554.316507] env[63371]: DEBUG nova.objects.instance [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lazy-loading 'resources' on Instance uuid aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1554.384957] env[63371]: DEBUG nova.network.neutron [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance_info_cache with network_info: [{"id": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "address": "fa:16:3e:e7:1c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a7d6d-66", "ovs_interfaceid": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1554.392262] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Volume attach. 
Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1554.392496] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368367', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'name': 'volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b880750e-7bf4-412c-bcff-eb2c343f60f0', 'attached_at': '', 'detached_at': '', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'serial': '31062dc6-9857-475c-b6b3-4e33c4ca4a59'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1554.393399] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4745c310-69f9-4295-9169-c05981fbec14 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.410967] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0851e4f9-8e49-4116-b4c2-5ac81817e799 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.414795] env[63371]: DEBUG nova.network.neutron [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Successfully updated port: dbbac158-9444-441f-b15b-2a793507b64f {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1554.444882] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59/volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1554.445506] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0e81cf2-21cf-4cda-97b5-a30981782b59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.467774] env[63371]: DEBUG oslo_vmware.api [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774177, 'name': PowerOnVM_Task, 'duration_secs': 0.587992} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1554.469077] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1554.469291] env[63371]: INFO nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Took 9.02 seconds to spawn the instance on the hypervisor. [ 1554.469473] env[63371]: DEBUG nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1554.469805] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1554.469805] env[63371]: value = "task-1774178" [ 1554.469805] env[63371]: _type = "Task" [ 1554.469805] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.470511] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ffb79e-0d4c-41cd-8f93-b33c51eb46d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.484112] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774178, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.506246] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1554.507160] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1554.507160] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1554.507160] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1554.507160] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1554.507160] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1554.507466] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1554.509079] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1554.509079] env[63371]: DEBUG nova.virt.hardware [None 
req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1554.509079] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1554.509079] env[63371]: DEBUG nova.virt.hardware [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1554.509335] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f6d336-2b5c-4eee-84ac-5fb2a805bc48 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.518501] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503d78d2-d725-48f5-9b28-0bcb702d863e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.534056] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4b:68:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ccc0e97b-b21d-4557-a4d4-fd7e8f973368', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e2249de3-2c03-4371-aab4-6173dd2b5d56', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1554.541656] env[63371]: DEBUG oslo.service.loopingcall [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1554.542182] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1554.542412] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c318b557-7814-4fdd-864b-a38a5948bd93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.565604] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1554.565604] env[63371]: value = "task-1774179" [ 1554.565604] env[63371]: _type = "Task" [ 1554.565604] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.574473] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774179, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.822067] env[63371]: DEBUG nova.compute.utils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1554.823804] env[63371]: DEBUG nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1554.823990] env[63371]: DEBUG nova.network.neutron [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1554.876014] env[63371]: DEBUG nova.policy [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd08b55f9fa3a45b0a8672e955ee360c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ceecd2a995cf4da0b4218e371065ca0b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1554.888109] env[63371]: DEBUG oslo_concurrency.lockutils [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1554.916639] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquiring lock "refresh_cache-e05c7187-b4d6-481e-8bce-deb557dde6a8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.916806] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquired lock "refresh_cache-e05c7187-b4d6-481e-8bce-deb557dde6a8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.916942] env[63371]: DEBUG nova.network.neutron [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1555.000235] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 
tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774178, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.005501] env[63371]: INFO nova.compute.manager [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Took 43.83 seconds to build instance. [ 1555.078630] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774179, 'name': CreateVM_Task} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.078852] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1555.079687] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.079853] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.080181] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1555.080485] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acd1d4d3-56a6-4ee3-a3f1-9bb6f8e84c79 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.087933] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1555.087933] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52234a5a-d9fb-f39c-bbfe-a60672ec7c4c" [ 1555.087933] env[63371]: _type = "Task" [ 1555.087933] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.097839] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52234a5a-d9fb-f39c-bbfe-a60672ec7c4c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.214805] env[63371]: DEBUG nova.network.neutron [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Successfully created port: 82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1555.254016] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54f2a11-e054-4cd7-8188-2537e60a05ab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.261565] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7219ce9-d898-4369-8c02-5c2224d15254 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.297209] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9acae5-8679-4f16-b303-9ba99188ad93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.301045] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674c9bee-ed58-4232-9637-2dae60ea849f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.307836] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Suspending the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1555.310467] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-8920a70b-fbf3-4137-892f-d07d4e724f49 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.313180] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854c09cb-7dd6-4c0f-8825-09c0c43b3f7e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.327427] env[63371]: DEBUG nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1555.330515] env[63371]: DEBUG nova.compute.provider_tree [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1555.333431] env[63371]: DEBUG oslo_vmware.api [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1555.333431] env[63371]: value = "task-1774180" [ 1555.333431] env[63371]: _type = "Task" [ 1555.333431] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.343184] env[63371]: DEBUG oslo_vmware.api [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774180, 'name': SuspendVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.392209] env[63371]: DEBUG nova.compute.manager [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63371) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1555.392209] env[63371]: DEBUG oslo_concurrency.lockutils [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1555.484614] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774178, 'name': ReconfigVM_Task, 'duration_secs': 0.590201} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.484948] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Reconfigured VM instance instance-0000003a to attach disk [datastore1] volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59/volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1555.490042] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8166191b-5053-41af-8efe-46a261e16090 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.504578] env[63371]: DEBUG nova.network.neutron [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1555.507541] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e21b57c-5bf0-4680-abb3-3b5c480ab4d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.346s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1555.510682] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1555.510682] env[63371]: value = "task-1774181" [ 1555.510682] env[63371]: _type = "Task" [ 1555.510682] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.520912] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774181, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.604657] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52234a5a-d9fb-f39c-bbfe-a60672ec7c4c, 'name': SearchDatastore_Task, 'duration_secs': 0.015824} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.605285] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1555.605285] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1555.605394] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.606031] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.606031] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1555.606031] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35eb6527-200a-4199-85dd-f343a3fde0d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.618832] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1555.619256] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1555.620335] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d3ffb871-3d75-4c18-8f8c-e425b363518d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.630653] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1555.630653] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522f4e86-97cf-3751-32fe-7cd0d2fc1203" [ 1555.630653] env[63371]: _type = "Task" [ 1555.630653] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.647312] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522f4e86-97cf-3751-32fe-7cd0d2fc1203, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.685742] env[63371]: DEBUG nova.network.neutron [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Updating instance_info_cache with network_info: [{"id": "dbbac158-9444-441f-b15b-2a793507b64f", "address": "fa:16:3e:49:5f:36", "network": {"id": "7b14192c-e7bb-428c-a24f-7334644bfb47", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-43156726-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8973623e406e4ab699162499116ac8d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbbac158-94", "ovs_interfaceid": "dbbac158-9444-441f-b15b-2a793507b64f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.838428] env[63371]: DEBUG nova.scheduler.client.report [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1555.852161] env[63371]: DEBUG oslo_vmware.api [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774180, 'name': SuspendVM_Task} progress is 87%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.021434] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774181, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.141767] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522f4e86-97cf-3751-32fe-7cd0d2fc1203, 'name': SearchDatastore_Task, 'duration_secs': 0.095956} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.142726] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73ecd81a-b760-42d0-ac82-79e6b3efd802 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.148302] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1556.148302] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9fd9a-ce37-2d8f-08e9-555db9d80085" [ 1556.148302] env[63371]: _type = "Task" [ 1556.148302] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.156869] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9fd9a-ce37-2d8f-08e9-555db9d80085, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.191274] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Releasing lock "refresh_cache-e05c7187-b4d6-481e-8bce-deb557dde6a8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.191751] env[63371]: DEBUG nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Instance network_info: |[{"id": "dbbac158-9444-441f-b15b-2a793507b64f", "address": "fa:16:3e:49:5f:36", "network": {"id": "7b14192c-e7bb-428c-a24f-7334644bfb47", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-43156726-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8973623e406e4ab699162499116ac8d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbbac158-94", "ovs_interfaceid": "dbbac158-9444-441f-b15b-2a793507b64f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1556.192272] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:5f:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4055505f-97ab-400b-969c-43d99b38fd48', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dbbac158-9444-441f-b15b-2a793507b64f', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1556.200864] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Creating folder: Project (8973623e406e4ab699162499116ac8d1). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1556.201164] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d9843764-9154-46fb-bf47-0c98327d4074 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.214425] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Created folder: Project (8973623e406e4ab699162499116ac8d1) in parent group-v368199. [ 1556.214781] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Creating folder: Instances. Parent ref: group-v368369. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1556.215176] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-abc2b75b-cfaa-4171-809a-ca26ecc0161a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.227294] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Created folder: Instances in parent group-v368369. [ 1556.230039] env[63371]: DEBUG oslo.service.loopingcall [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1556.230039] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1556.230039] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6db8288d-4389-4f5c-98be-b986675b3286 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.254396] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1556.254396] env[63371]: value = "task-1774184" [ 1556.254396] env[63371]: _type = "Task" [ 1556.254396] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.262757] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774184, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.314335] env[63371]: DEBUG nova.compute.manager [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Received event network-changed-dbbac158-9444-441f-b15b-2a793507b64f {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1556.314554] env[63371]: DEBUG nova.compute.manager [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Refreshing instance network info cache due to event network-changed-dbbac158-9444-441f-b15b-2a793507b64f. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1556.314755] env[63371]: DEBUG oslo_concurrency.lockutils [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] Acquiring lock "refresh_cache-e05c7187-b4d6-481e-8bce-deb557dde6a8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1556.314897] env[63371]: DEBUG oslo_concurrency.lockutils [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] Acquired lock "refresh_cache-e05c7187-b4d6-481e-8bce-deb557dde6a8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1556.315069] env[63371]: DEBUG nova.network.neutron [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Refreshing network info cache for port dbbac158-9444-441f-b15b-2a793507b64f {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1556.344151] env[63371]: DEBUG nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1556.354388] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1556.356381] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.240s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.356381] env[63371]: DEBUG nova.objects.instance [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lazy-loading 'resources' on Instance uuid e0369f27-68ea-49c4-8524-3dbbb3cde96e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1556.364619] env[63371]: DEBUG oslo_vmware.api [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774180, 'name': SuspendVM_Task} progress is 87%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.376666] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1556.376943] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1556.377169] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1556.377390] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 
tempest-ServerActionsTestOtherB-610614522-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1556.377551] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1556.377703] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1556.377908] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1556.378102] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1556.378373] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1556.378582] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1556.378761] env[63371]: DEBUG nova.virt.hardware [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1556.379707] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3febc60d-bf37-49a7-885c-76a56ebd6425 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.383127] env[63371]: INFO nova.scheduler.client.report [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Deleted allocations for instance aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf [ 1556.392195] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83c0c41-1b56-4537-9284-bdd786edb34e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.523496] env[63371]: DEBUG oslo_vmware.api [None req-856648c1-31a6-4a80-b50d-0d55160d9406 
tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774181, 'name': ReconfigVM_Task, 'duration_secs': 0.538754} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.523781] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368367', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'name': 'volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b880750e-7bf4-412c-bcff-eb2c343f60f0', 'attached_at': '', 'detached_at': '', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'serial': '31062dc6-9857-475c-b6b3-4e33c4ca4a59'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1556.659590] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9fd9a-ce37-2d8f-08e9-555db9d80085, 'name': SearchDatastore_Task, 'duration_secs': 0.043448} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.659854] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.660162] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1556.660403] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a55605a6-de03-42c9-83d4-8557c85285e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.669222] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1556.669222] env[63371]: value = "task-1774185" [ 1556.669222] env[63371]: _type = "Task" [ 1556.669222] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.679176] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774185, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.769016] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774184, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.809916] env[63371]: DEBUG nova.network.neutron [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Successfully updated port: 82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1556.852691] env[63371]: DEBUG oslo_vmware.api [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774180, 'name': SuspendVM_Task, 'duration_secs': 1.508934} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.855086] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Suspended the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1556.855242] env[63371]: DEBUG nova.compute.manager [None req-5ce1d030-1105-4917-a06f-04d860f9958a tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1556.856069] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ceb4570-88bc-4ab8-8b74-fe95a5c68db8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.897858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1613e8f-080a-46f0-a722-19f5ba645a1b tempest-ImagesOneServerTestJSON-1278559532 tempest-ImagesOneServerTestJSON-1278559532-project-member] Lock "aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.500s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1557.076356] env[63371]: DEBUG nova.network.neutron [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Updated VIF entry in instance network info cache for port dbbac158-9444-441f-b15b-2a793507b64f. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1557.076727] env[63371]: DEBUG nova.network.neutron [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Updating instance_info_cache with network_info: [{"id": "dbbac158-9444-441f-b15b-2a793507b64f", "address": "fa:16:3e:49:5f:36", "network": {"id": "7b14192c-e7bb-428c-a24f-7334644bfb47", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-43156726-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8973623e406e4ab699162499116ac8d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbbac158-94", "ovs_interfaceid": "dbbac158-9444-441f-b15b-2a793507b64f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1557.183730] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774185, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.237946] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c847b7a3-2907-44b5-8b61-3806766c4db0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.246189] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178d4e18-903b-4d1b-8b10-c84fec7e8dac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.279638] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46571ed5-7bd5-4edb-a48d-ef653df92f52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.289847] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774184, 'name': CreateVM_Task, 'duration_secs': 0.854823} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.289904] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1557.291230] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d584fa3-eef5-4752-8780-4195f07a8cc9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.295386] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.295597] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.295865] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1557.296125] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fade93bb-3ed4-41ca-9343-43a064eb1292 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.308758] env[63371]: DEBUG nova.compute.provider_tree [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1557.311078] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1557.311078] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52462d12-700c-bb75-1d9a-bda42bc6ff9a" [ 1557.311078] env[63371]: _type = "Task" [ 1557.311078] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.314646] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.314760] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.314906] env[63371]: DEBUG nova.network.neutron [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1557.322225] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52462d12-700c-bb75-1d9a-bda42bc6ff9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.567634] env[63371]: DEBUG nova.objects.instance [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lazy-loading 'flavor' on Instance uuid b880750e-7bf4-412c-bcff-eb2c343f60f0 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1557.579375] env[63371]: DEBUG oslo_concurrency.lockutils [req-3892168a-6198-4b54-a57e-6cc99ac00953 req-58f0fab7-478d-4f07-89be-2ba8c2a855e4 service nova] Releasing lock "refresh_cache-e05c7187-b4d6-481e-8bce-deb557dde6a8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.684051] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774185, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.812474] env[63371]: DEBUG nova.scheduler.client.report [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1557.830824] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52462d12-700c-bb75-1d9a-bda42bc6ff9a, 'name': SearchDatastore_Task, 'duration_secs': 0.049179} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.831234] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.831469] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1557.831740] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.832514] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.832514] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1557.832916] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-ed722591-03e5-42b5-80f2-3769beb0c6ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.845859] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1557.846686] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1557.847485] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0513607e-f48e-4045-90c3-5b67c7298452 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.854087] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1557.854087] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d33f30-23aa-36fa-b034-16ce87693664" [ 1557.854087] env[63371]: _type = "Task" [ 1557.854087] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.864141] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d33f30-23aa-36fa-b034-16ce87693664, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.875327] env[63371]: DEBUG nova.network.neutron [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1558.038876] env[63371]: DEBUG nova.network.neutron [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance_info_cache with network_info: [{"id": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "address": "fa:16:3e:50:09:23", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82aece5e-dc", "ovs_interfaceid": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1558.073199] env[63371]: DEBUG oslo_concurrency.lockutils [None req-856648c1-31a6-4a80-b50d-0d55160d9406 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.288s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.183330] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774185, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.178396} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.183590] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1558.183805] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1558.184060] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6d04a9c-fcfe-4384-aa09-de8d2083aea5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.191798] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1558.191798] env[63371]: value = "task-1774186" [ 1558.191798] env[63371]: _type = "Task" [ 1558.191798] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.203341] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774186, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.324116] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.965s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.324116] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.361s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.326978] env[63371]: INFO nova.compute.claims [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1558.347213] env[63371]: DEBUG nova.compute.manager [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Received event network-vif-plugged-82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1558.347213] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] Acquiring lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.347213] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.347213] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.347213] env[63371]: DEBUG nova.compute.manager [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] No waiting events found dispatching network-vif-plugged-82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1558.347733] env[63371]: WARNING nova.compute.manager [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Received unexpected event network-vif-plugged-82aece5e-dc40-4c18-a1a9-4b4e859fef2a for instance with 
vm_state building and task_state spawning. [ 1558.348094] env[63371]: DEBUG nova.compute.manager [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Received event network-changed-82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1558.348409] env[63371]: DEBUG nova.compute.manager [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Refreshing instance network info cache due to event network-changed-82aece5e-dc40-4c18-a1a9-4b4e859fef2a. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1558.348707] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] Acquiring lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1558.351414] env[63371]: INFO nova.scheduler.client.report [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Deleted allocations for instance e0369f27-68ea-49c4-8524-3dbbb3cde96e [ 1558.369452] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.372018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.372018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "b880750e-7bf4-412c-bcff-eb2c343f60f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.372018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.372018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.372659] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d33f30-23aa-36fa-b034-16ce87693664, 'name': SearchDatastore_Task, 'duration_secs': 0.03301} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.374648] env[63371]: INFO nova.compute.manager [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Terminating instance [ 1558.376112] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18d01a29-defe-4a10-93a8-9c8d8f26549a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.380297] env[63371]: DEBUG nova.compute.manager [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1558.380605] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1558.380931] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca402362-6a29-4a38-a9a1-9621472f00c7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.394227] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1558.394227] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521fa4fb-d465-aee0-c1e5-44a4bfb935b9" [ 1558.394227] env[63371]: _type = "Task" [ 1558.394227] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.394227] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1558.394227] env[63371]: value = "task-1774187" [ 1558.394227] env[63371]: _type = "Task" [ 1558.394227] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.410026] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521fa4fb-d465-aee0-c1e5-44a4bfb935b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.410880] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774187, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.542392] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.546186] env[63371]: DEBUG nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Instance network_info: |[{"id": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "address": "fa:16:3e:50:09:23", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82aece5e-dc", "ovs_interfaceid": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1558.546538] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] Acquired lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1558.546724] env[63371]: DEBUG nova.network.neutron [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Refreshing network info cache for port 82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1558.548902] env[63371]: DEBUG 
nova.virt.vmwareapi.vmops [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:09:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3a0ddd7d-c321-4187-bdd8-b19044ea2c4a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '82aece5e-dc40-4c18-a1a9-4b4e859fef2a', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1558.557809] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating folder: Project (ceecd2a995cf4da0b4218e371065ca0b). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1558.558266] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d59fb624-e29e-4fc9-995e-ae9b081fec4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.572100] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Created folder: Project (ceecd2a995cf4da0b4218e371065ca0b) in parent group-v368199. [ 1558.572407] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating folder: Instances. Parent ref: group-v368372. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1558.572568] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e96ed30-254e-43aa-aa3c-b772a1337c9e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.587298] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Created folder: Instances in parent group-v368372. [ 1558.587298] env[63371]: DEBUG oslo.service.loopingcall [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1558.587423] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1558.587615] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3828701a-991f-4c12-8ad8-f2f735254f17 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.611553] env[63371]: DEBUG nova.compute.manager [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1558.612473] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3048daee-6f57-4102-b1b3-2d04fff8e91e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.619435] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1558.619435] env[63371]: value = "task-1774190" [ 1558.619435] env[63371]: _type = "Task" [ 1558.619435] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.632448] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.707498] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774186, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133602} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.707498] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1558.707723] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91484889-6f1d-4410-9d4a-494d6bbf3dde {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.735908] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1558.736304] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8d64909-8a29-4231-af99-31630355a1f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.760056] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1558.760056] env[63371]: value = "task-1774191" [ 1558.760056] env[63371]: _type = "Task" [ 1558.760056] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.765433] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774191, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.864384] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a867b3b6-8f04-403d-b310-aee2e25e0a94 tempest-FloatingIPsAssociationTestJSON-1951641126 tempest-FloatingIPsAssociationTestJSON-1951641126-project-member] Lock "e0369f27-68ea-49c4-8524-3dbbb3cde96e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.173s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.910944] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774187, 'name': PowerOffVM_Task, 'duration_secs': 0.365629} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.914528] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1558.914746] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Volume detach. Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1558.914941] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368367', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'name': 'volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b880750e-7bf4-412c-bcff-eb2c343f60f0', 'attached_at': '', 'detached_at': '', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'serial': '31062dc6-9857-475c-b6b3-4e33c4ca4a59'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1558.915336] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521fa4fb-d465-aee0-c1e5-44a4bfb935b9, 'name': SearchDatastore_Task, 'duration_secs': 0.022318} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.916059] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7309679f-a4ce-4403-b444-ffb2ede76471 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.918496] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.918752] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e05c7187-b4d6-481e-8bce-deb557dde6a8/e05c7187-b4d6-481e-8bce-deb557dde6a8.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1558.918994] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f201be5e-035e-428d-97ae-66b5a3b206ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.947672] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-195dc7e2-c470-4cef-94e7-0c891cea03dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.950819] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1558.950819] env[63371]: value = "task-1774192" [ 1558.950819] env[63371]: _type = "Task" [ 1558.950819] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.956630] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b0947a-9f89-4eb7-81fe-605c4a9124de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.962013] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774192, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.987362] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a53d80-1279-44f0-9c46-ebdb1587ba40 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.008827] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] The volume has not been displaced from its original location: [datastore1] volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59/volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59.vmdk. No consolidation needed. {{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1559.014078] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Reconfiguring VM instance instance-0000003a to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1559.014429] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97e928e6-ea1b-4a9e-a425-49c7be4f0817 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.034403] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1559.034403] env[63371]: value = "task-1774193" [ 1559.034403] env[63371]: _type = "Task" [ 1559.034403] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.045289] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774193, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.128288] env[63371]: INFO nova.compute.manager [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] instance snapshotting [ 1559.128509] env[63371]: WARNING nova.compute.manager [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1559.133411] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.134509] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43778b0-18be-42c3-9c0a-6c185b4b46d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.153180] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d44a661-c2fa-462b-9c15-81bac8fe5b81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.267177] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774191, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.473932] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774192, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.475340] env[63371]: DEBUG nova.network.neutron [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updated VIF entry in instance network info cache for port 82aece5e-dc40-4c18-a1a9-4b4e859fef2a. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1559.475585] env[63371]: DEBUG nova.network.neutron [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance_info_cache with network_info: [{"id": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "address": "fa:16:3e:50:09:23", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82aece5e-dc", "ovs_interfaceid": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.548197] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774193, 'name': ReconfigVM_Task} progress is 
14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.636122] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.666421] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1559.667100] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bc312f12-22c4-49f5-9b8c-cfb14d1fbf81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.677497] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1559.677497] env[63371]: value = "task-1774194" [ 1559.677497] env[63371]: _type = "Task" [ 1559.677497] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.694132] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774194, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.773018] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774191, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.837245] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e872a07e-5e8b-4898-a425-1a5fe32e5edb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.851060] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0543b89b-6cac-45b7-966f-c26a33106e29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.909403] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a897c7-858b-4b2a-b3db-cc3e5d1b7ebd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.919230] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d442cc1f-1b4b-45e8-a8ac-7b3e916b5b18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.937718] env[63371]: DEBUG nova.compute.provider_tree [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1559.963792] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774192, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.004232} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.964134] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e05c7187-b4d6-481e-8bce-deb557dde6a8/e05c7187-b4d6-481e-8bce-deb557dde6a8.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1559.964662] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1559.964662] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5642c582-483b-4da8-854a-3668d6424fdb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.973677] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1559.973677] env[63371]: value = "task-1774195" [ 1559.973677] env[63371]: _type = "Task" [ 1559.973677] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.984409] env[63371]: DEBUG oslo_concurrency.lockutils [req-3f199d4c-e2ee-49df-ba2a-568e000baebd req-59d3e5d7-380a-4a51-8127-02f69e8bc434 service nova] Releasing lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.985154] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774195, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.046787] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774193, 'name': ReconfigVM_Task, 'duration_secs': 0.981522} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.047434] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Reconfigured VM instance instance-0000003a to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1560.054510] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8e5c34b-eb3c-4506-83dd-185c6188e659 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.070440] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1560.070440] env[63371]: value = "task-1774196" [ 1560.070440] env[63371]: _type = "Task" [ 1560.070440] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.080537] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774196, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.135965] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.190948] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774194, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.271610] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774191, 'name': ReconfigVM_Task, 'duration_secs': 1.394321} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.271903] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Reconfigured VM instance instance-00000013 to attach disk [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60/e4608e3c-7083-42fa-b88c-8ee007ef7f60.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1560.272756] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e1272ca-837d-48c2-96a7-b416b0bb842f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.280305] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1560.280305] env[63371]: value = "task-1774197" [ 1560.280305] env[63371]: _type = "Task" [ 1560.280305] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.289490] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774197, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.440316] env[63371]: DEBUG nova.scheduler.client.report [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1560.484659] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774195, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.176642} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.484936] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1560.485999] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b32a78d-51d0-4666-a1b7-7c08f19412e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.510160] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] e05c7187-b4d6-481e-8bce-deb557dde6a8/e05c7187-b4d6-481e-8bce-deb557dde6a8.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1560.510160] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dacc814a-c8ef-47e1-b1fe-0ea0df98780a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.532027] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1560.532027] env[63371]: value = "task-1774198" [ 1560.532027] env[63371]: _type = "Task" [ 1560.532027] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.542111] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774198, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.580474] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774196, 'name': ReconfigVM_Task, 'duration_secs': 0.280192} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.580855] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368367', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'name': 'volume-31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b880750e-7bf4-412c-bcff-eb2c343f60f0', 'attached_at': '', 'detached_at': '', 'volume_id': '31062dc6-9857-475c-b6b3-4e33c4ca4a59', 'serial': '31062dc6-9857-475c-b6b3-4e33c4ca4a59'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1560.581240] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1560.582411] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d345a7-f1d2-483c-a3cb-998851daec93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.590609] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1560.590866] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04bef8cf-c465-4482-841a-b8b0372c7179 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.637615] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.691987] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774194, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.792913] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774197, 'name': Rename_Task, 'duration_secs': 0.234603} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.793234] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1560.793570] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9a74c5a-258f-4d88-b35f-45b28923edae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.802103] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1560.802103] env[63371]: value = "task-1774200" [ 1560.802103] env[63371]: _type = "Task" [ 1560.802103] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.812317] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774200, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.947043] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.623s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.947579] env[63371]: DEBUG nova.compute.manager [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1560.951476] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.805s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.952966] env[63371]: INFO nova.compute.claims [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1561.042996] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774198, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.133906] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.192387] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774194, 'name': CreateSnapshot_Task, 'duration_secs': 1.220839} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.192688] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1561.193461] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f0c373-b13e-4001-947d-e477efd88d8e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.317996] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774200, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.458844] env[63371]: DEBUG nova.compute.utils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1561.462466] env[63371]: DEBUG nova.compute.manager [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1561.462618] env[63371]: DEBUG nova.network.neutron [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1561.543738] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774198, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.545225] env[63371]: DEBUG nova.policy [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aacd81490704110b6cc6aba338883a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a5897667b6b47deb7ff5b64f9499f36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1561.637771] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.712452] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1561.712685] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5e7592ed-b86a-41f2-baa3-5312a6d7111b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.722871] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1561.722871] env[63371]: value = "task-1774201" [ 1561.722871] env[63371]: _type = "Task" [ 1561.722871] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.733054] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774201, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.812759] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774200, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.964012] env[63371]: DEBUG nova.compute.manager [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1562.053592] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774198, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.137700] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.152192] env[63371]: DEBUG nova.network.neutron [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Successfully created port: bc8b891d-040a-4a55-a281-311c08ae828d {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1562.242094] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774201, 'name': CloneVM_Task} progress is 23%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.313325] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774200, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.432602] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61a7bb9-02f9-49ca-bd65-5ed55d6b71e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.440547] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb0962e-ae8d-4ddd-86df-642fddc818a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.475871] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378d1a35-c600-464a-9a26-78b280c2912f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.484256] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee302643-e95e-4b9b-879b-4f2dd506cd47 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.501490] env[63371]: DEBUG nova.compute.provider_tree [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1562.547548] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774198, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.549705] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1562.549705] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1562.549911] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleting the datastore file [datastore1] b880750e-7bf4-412c-bcff-eb2c343f60f0 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1562.550224] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6a9f9177-deff-4479-9a72-a85212287070 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.557000] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1562.557000] env[63371]: value = "task-1774202" [ 1562.557000] env[63371]: _type = "Task" [ 1562.557000] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.568258] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774202, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.638462] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774190, 'name': CreateVM_Task, 'duration_secs': 3.940667} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.638810] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1562.639600] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.639983] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.640376] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1562.640651] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f791debc-b957-49be-847b-fd5dcf89431f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.646437] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1562.646437] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528bf506-4b27-76d5-9336-19b7932b5179" [ 1562.646437] env[63371]: _type = "Task" [ 1562.646437] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.657056] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528bf506-4b27-76d5-9336-19b7932b5179, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.740336] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774201, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.818405] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774200, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.976146] env[63371]: DEBUG nova.compute.manager [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1563.004624] env[63371]: DEBUG nova.scheduler.client.report [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1563.014432] env[63371]: DEBUG nova.virt.hardware [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1563.014432] env[63371]: DEBUG nova.virt.hardware [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1563.014432] env[63371]: DEBUG nova.virt.hardware [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1563.014432] env[63371]: DEBUG nova.virt.hardware [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1563.014432] env[63371]: DEBUG nova.virt.hardware [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1563.014432] env[63371]: DEBUG nova.virt.hardware [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1563.014432] env[63371]: DEBUG nova.virt.hardware [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1563.014432] env[63371]: DEBUG nova.virt.hardware [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1563.014432] env[63371]: DEBUG nova.virt.hardware [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1563.014432] env[63371]: DEBUG nova.virt.hardware [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1563.014432] env[63371]: DEBUG nova.virt.hardware [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1563.014432] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04aec9db-e28a-4b47-9af0-d2c3f862d82c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.023936] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77029e5e-7fca-4074-8acf-792043f932ee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.048842] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774198, 'name': ReconfigVM_Task, 'duration_secs': 2.418496} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.049156] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Reconfigured VM instance instance-0000003c to attach disk [datastore1] e05c7187-b4d6-481e-8bce-deb557dde6a8/e05c7187-b4d6-481e-8bce-deb557dde6a8.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1563.050033] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-989eb3c0-d81c-41f6-b04a-e875acf499a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.058684] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1563.058684] env[63371]: value = "task-1774203" [ 1563.058684] env[63371]: _type = "Task" [ 1563.058684] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.070315] env[63371]: DEBUG oslo_vmware.api [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774202, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.44104} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.074046] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1563.074275] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1563.074488] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1563.074671] env[63371]: INFO nova.compute.manager [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Took 4.69 seconds to destroy the instance on the hypervisor. [ 1563.074921] env[63371]: DEBUG oslo.service.loopingcall [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1563.075155] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774203, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.075710] env[63371]: DEBUG nova.compute.manager [-] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1563.075808] env[63371]: DEBUG nova.network.neutron [-] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1563.163127] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528bf506-4b27-76d5-9336-19b7932b5179, 'name': SearchDatastore_Task, 'duration_secs': 0.027328} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.163624] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1563.164244] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1563.164650] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1563.164883] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1563.165175] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1563.165812] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-363f3d50-4c90-4940-8e31-3fe0b5c9472a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.185809] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1563.186034] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1563.186819] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7f0e918-e418-4fb7-b3ab-7d90dcd32b66 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.195855] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1563.195855] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c4a175-e0ee-42df-e637-b23af05b5775" [ 1563.195855] env[63371]: _type = "Task" [ 1563.195855] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.205503] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c4a175-e0ee-42df-e637-b23af05b5775, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.241138] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774201, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.317474] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774200, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.518413] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.567s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.518944] env[63371]: DEBUG nova.compute.manager [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1563.521600] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.640s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1563.523140] env[63371]: INFO nova.compute.claims [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1563.583289] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774203, 'name': Rename_Task, 'duration_secs': 0.289181} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.584639] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1563.585023] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eee7a660-9ecb-46f8-9c9a-cca08434a775 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.593378] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1563.593378] env[63371]: value = "task-1774204" [ 1563.593378] env[63371]: _type = "Task" [ 1563.593378] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.604866] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774204, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.711218] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c4a175-e0ee-42df-e637-b23af05b5775, 'name': SearchDatastore_Task, 'duration_secs': 0.032401} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.712414] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a19a2f50-172a-4c05-a2af-5082502068e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.722543] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1563.722543] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52de8eb4-14b5-b5ca-9593-ae54dce809e3" [ 1563.722543] env[63371]: _type = "Task" [ 1563.722543] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1563.736341] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52de8eb4-14b5-b5ca-9593-ae54dce809e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.742685] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774201, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.813414] env[63371]: DEBUG nova.compute.manager [req-aa962dcf-4d7a-4499-9e55-5c56d399ecfa req-49f543dc-cb82-4bf7-9e6b-61ff673f3003 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Received event network-vif-deleted-d233c17c-a3d0-4e06-8087-721a7808298d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1563.813630] env[63371]: INFO nova.compute.manager [req-aa962dcf-4d7a-4499-9e55-5c56d399ecfa req-49f543dc-cb82-4bf7-9e6b-61ff673f3003 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Neutron deleted interface d233c17c-a3d0-4e06-8087-721a7808298d; detaching it from the instance and deleting it from the info cache [ 1563.813858] env[63371]: DEBUG nova.network.neutron [req-aa962dcf-4d7a-4499-9e55-5c56d399ecfa req-49f543dc-cb82-4bf7-9e6b-61ff673f3003 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1563.819330] env[63371]: DEBUG oslo_vmware.api [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774200, 'name': PowerOnVM_Task, 'duration_secs': 2.620099} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1563.819664] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1563.819858] env[63371]: DEBUG nova.compute.manager [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1563.821280] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1566fa-2475-4e96-a245-c49e62f9dd41 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.028439] env[63371]: DEBUG nova.compute.utils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1564.030432] env[63371]: DEBUG nova.compute.manager [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1564.030772] env[63371]: DEBUG nova.network.neutron [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1564.084439] env[63371]: DEBUG nova.compute.manager [req-6a34286a-2d93-474b-8221-95ab52497971 req-e7b3b956-7e2a-4765-be4f-827e05e72493 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Received event network-vif-plugged-bc8b891d-040a-4a55-a281-311c08ae828d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1564.084685] env[63371]: DEBUG oslo_concurrency.lockutils [req-6a34286a-2d93-474b-8221-95ab52497971 req-e7b3b956-7e2a-4765-be4f-827e05e72493 service nova] Acquiring lock "d00602b9-16bf-4c11-bc47-6076dddbf159-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.084902] env[63371]: DEBUG oslo_concurrency.lockutils [req-6a34286a-2d93-474b-8221-95ab52497971 req-e7b3b956-7e2a-4765-be4f-827e05e72493 service nova] Lock "d00602b9-16bf-4c11-bc47-6076dddbf159-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.085212] env[63371]: DEBUG oslo_concurrency.lockutils [req-6a34286a-2d93-474b-8221-95ab52497971 req-e7b3b956-7e2a-4765-be4f-827e05e72493 service nova] Lock "d00602b9-16bf-4c11-bc47-6076dddbf159-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.085410] env[63371]: DEBUG nova.compute.manager [req-6a34286a-2d93-474b-8221-95ab52497971 req-e7b3b956-7e2a-4765-be4f-827e05e72493 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] No waiting events found dispatching network-vif-plugged-bc8b891d-040a-4a55-a281-311c08ae828d {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1564.085570] env[63371]: WARNING nova.compute.manager [req-6a34286a-2d93-474b-8221-95ab52497971 req-e7b3b956-7e2a-4765-be4f-827e05e72493 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Received unexpected event network-vif-plugged-bc8b891d-040a-4a55-a281-311c08ae828d for instance with vm_state building and task_state spawning. [ 1564.086333] env[63371]: DEBUG nova.network.neutron [-] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1564.091352] env[63371]: DEBUG nova.policy [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd1f296edd77f44fda265ff06ba35dce0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '51d56c0c34d04535bbac2f8255380c32', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1564.106775] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774204, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.237880] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52de8eb4-14b5-b5ca-9593-ae54dce809e3, 'name': SearchDatastore_Task, 'duration_secs': 0.020611} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.238542] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1564.238901] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9862b0f0-ccf6-4e69-9e78-cf864adaa65e/9862b0f0-ccf6-4e69-9e78-cf864adaa65e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1564.240465] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07ca5a09-bc77-4874-836c-a8fdd841f48b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.246584] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774201, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.253089] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1564.253089] env[63371]: value = "task-1774205" [ 1564.253089] env[63371]: _type = "Task" [ 1564.253089] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.264961] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774205, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.320580] env[63371]: DEBUG nova.network.neutron [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Successfully updated port: bc8b891d-040a-4a55-a281-311c08ae828d {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1564.326718] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9921f477-888a-43ea-a349-c6e48679a913 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.345939] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ce180c7-2194-4239-a619-5a25d9ee0109 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.364028] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.388804] env[63371]: DEBUG nova.compute.manager [req-aa962dcf-4d7a-4499-9e55-5c56d399ecfa req-49f543dc-cb82-4bf7-9e6b-61ff673f3003 service nova] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Detach interface failed, port_id=d233c17c-a3d0-4e06-8087-721a7808298d, reason: Instance b880750e-7bf4-412c-bcff-eb2c343f60f0 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1564.537843] env[63371]: DEBUG nova.compute.manager [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1564.548060] env[63371]: DEBUG nova.network.neutron [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Successfully created port: 8f925cfd-c5f7-4a4f-8782-bea15764877a {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1564.588870] env[63371]: INFO nova.compute.manager [-] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Took 1.51 seconds to deallocate network for instance. [ 1564.614816] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774204, 'name': PowerOnVM_Task} progress is 71%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.745303] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774201, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.767219] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774205, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.830099] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1564.830099] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1564.830099] env[63371]: DEBUG nova.network.neutron [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1565.093224] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3367b1b-9dbb-4cbb-82ed-7eb869b79e95 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.110168] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a73fdc89-368a-47f1-9229-023730f1f073 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.114284] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774204, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.158223] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561959fc-782d-4111-8348-bde7f1233244 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.170096] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a224dd1-5bd9-49d1-bcc4-9aec6443863b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.175133] env[63371]: INFO nova.compute.manager [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Took 0.59 seconds to detach 1 volumes for instance. 
[ 1565.187325] env[63371]: DEBUG nova.compute.provider_tree [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1565.246470] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774201, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.266612] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774205, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.344435] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Acquiring lock "44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.344681] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Lock "44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.404934] env[63371]: DEBUG nova.network.neutron [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1565.552517] env[63371]: DEBUG nova.compute.manager [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1565.578364] env[63371]: DEBUG nova.virt.hardware [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1565.578612] env[63371]: DEBUG nova.virt.hardware [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1565.578767] env[63371]: DEBUG nova.virt.hardware [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1565.578946] env[63371]: DEBUG nova.virt.hardware [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1565.579483] env[63371]: DEBUG nova.virt.hardware [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1565.579686] env[63371]: DEBUG nova.virt.hardware [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1565.579904] env[63371]: DEBUG nova.virt.hardware [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1565.580079] env[63371]: DEBUG nova.virt.hardware [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1565.580249] env[63371]: DEBUG 
nova.virt.hardware [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1565.580416] env[63371]: DEBUG nova.virt.hardware [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1565.580632] env[63371]: DEBUG nova.virt.hardware [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1565.581687] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04f882ef-be67-47f9-87f9-732b2bef0c75 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.591941] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-202a1a21-191d-4012-9cda-c7d1e383e973 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.613246] env[63371]: DEBUG nova.network.neutron [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updating instance_info_cache with network_info: [{"id": "bc8b891d-040a-4a55-a281-311c08ae828d", "address": "fa:16:3e:ea:27:0c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8b891d-04", "ovs_interfaceid": "bc8b891d-040a-4a55-a281-311c08ae828d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1565.623406] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774204, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.693472] env[63371]: DEBUG nova.scheduler.client.report [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1565.703017] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.746810] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774201, 'name': CloneVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.767294] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774205, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.305902} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.767603] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9862b0f0-ccf6-4e69-9e78-cf864adaa65e/9862b0f0-ccf6-4e69-9e78-cf864adaa65e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1565.767845] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1565.768163] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0b105d1-32c0-4e81-9e95-867340a8f17f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.776647] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1565.776647] env[63371]: value = "task-1774206" [ 1565.776647] env[63371]: _type = "Task" [ 1565.776647] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.787150] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774206, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.849855] env[63371]: DEBUG nova.compute.manager [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1565.921922] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1565.922094] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1566.111272] env[63371]: DEBUG oslo_vmware.api [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774204, 'name': PowerOnVM_Task, 'duration_secs': 2.081157} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.112210] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1566.112624] env[63371]: INFO nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Took 12.32 seconds to spawn the instance on the hypervisor. [ 1566.115066] env[63371]: DEBUG nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1566.115066] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11923fb7-0f6f-4a07-9f6f-0293e5f46c68 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.117276] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1566.117703] env[63371]: DEBUG nova.compute.manager [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Instance network_info: |[{"id": "bc8b891d-040a-4a55-a281-311c08ae828d", "address": "fa:16:3e:ea:27:0c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8b891d-04", "ovs_interfaceid": "bc8b891d-040a-4a55-a281-311c08ae828d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1566.119103] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] 
[instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:27:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ba3bd22-c936-470e-89bd-b3a5587e87a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc8b891d-040a-4a55-a281-311c08ae828d', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1566.131021] env[63371]: DEBUG oslo.service.loopingcall [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1566.131021] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1566.131021] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6434fb95-6f22-4bd6-9348-dd066febd69b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.160694] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1566.160694] env[63371]: value = "task-1774207" [ 1566.160694] env[63371]: _type = "Task" [ 1566.160694] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.172811] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774207, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.206007] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.684s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.206419] env[63371]: DEBUG nova.compute.manager [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1566.210465] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.264s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.210701] env[63371]: DEBUG nova.objects.instance [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lazy-loading 'resources' on Instance uuid 0e2c8ced-198f-43be-9d41-703a7c590df4 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1566.250957] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774201, 'name': CloneVM_Task, 'duration_secs': 4.16921} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.250957] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Created linked-clone VM from snapshot [ 1566.251682] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f74d2c-e0c1-4bca-9b4f-0f46d54011b7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.263313] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Uploading image 17c4a4a7-e61d-4eb4-a2f8-2fc20bcfe68b {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1566.288857] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774206, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.186901} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.289289] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1566.290152] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689faf38-0949-4345-9b34-b245ae075323 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.322811] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 9862b0f0-ccf6-4e69-9e78-cf864adaa65e/9862b0f0-ccf6-4e69-9e78-cf864adaa65e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1566.327142] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1566.327142] env[63371]: value = "vm-368376" [ 1566.327142] env[63371]: _type = "VirtualMachine" [ 1566.327142] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1566.327142] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be6edf19-ef1d-4e73-b91d-78642ae3545e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.342088] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-2f1d66b2-5ab6-4464-9270-b1f3b386ada4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.348942] env[63371]: DEBUG nova.compute.manager [req-724c868a-c59f-41a0-a6aa-39ecd8836a49 req-eb09fc8c-66a2-4154-a9e0-8267438ed9f5 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Received event network-changed-bc8b891d-040a-4a55-a281-311c08ae828d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1566.349234] env[63371]: DEBUG nova.compute.manager [req-724c868a-c59f-41a0-a6aa-39ecd8836a49 req-eb09fc8c-66a2-4154-a9e0-8267438ed9f5 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Refreshing instance network info cache due to event network-changed-bc8b891d-040a-4a55-a281-311c08ae828d. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1566.349490] env[63371]: DEBUG oslo_concurrency.lockutils [req-724c868a-c59f-41a0-a6aa-39ecd8836a49 req-eb09fc8c-66a2-4154-a9e0-8267438ed9f5 service nova] Acquiring lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.349641] env[63371]: DEBUG oslo_concurrency.lockutils [req-724c868a-c59f-41a0-a6aa-39ecd8836a49 req-eb09fc8c-66a2-4154-a9e0-8267438ed9f5 service nova] Acquired lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.349877] env[63371]: DEBUG nova.network.neutron [req-724c868a-c59f-41a0-a6aa-39ecd8836a49 req-eb09fc8c-66a2-4154-a9e0-8267438ed9f5 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Refreshing network info cache for port bc8b891d-040a-4a55-a281-311c08ae828d {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1566.358929] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1566.358929] env[63371]: value = "task-1774208" [ 1566.358929] env[63371]: _type = "Task" [ 1566.358929] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.364164] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lease: (returnval){ [ 1566.364164] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bc0c6a-39b1-a210-a934-967ab8dd983b" [ 1566.364164] env[63371]: _type = "HttpNfcLease" [ 1566.364164] env[63371]: } obtained for exporting VM: (result){ [ 1566.364164] env[63371]: value = "vm-368376" [ 1566.364164] env[63371]: _type = "VirtualMachine" [ 1566.364164] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1566.364458] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the lease: (returnval){ [ 1566.364458] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bc0c6a-39b1-a210-a934-967ab8dd983b" [ 1566.364458] env[63371]: _type = "HttpNfcLease" [ 1566.364458] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1566.380108] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774208, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.382815] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.383267] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1566.383267] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bc0c6a-39b1-a210-a934-967ab8dd983b" [ 1566.383267] env[63371]: _type = "HttpNfcLease" [ 1566.383267] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1566.383551] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1566.383551] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bc0c6a-39b1-a210-a934-967ab8dd983b" [ 1566.383551] env[63371]: _type = "HttpNfcLease" [ 1566.383551] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1566.384979] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4043155-005c-4d38-80b2-f261b4448090 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.394128] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c5df23-4743-0bad-3ca0-06573eb5a5f4/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1566.394358] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c5df23-4743-0bad-3ca0-06573eb5a5f4/disk-0.vmdk for reading. 
{{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1566.466329] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1566.469021] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1566.519169] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-15bb2726-a6b7-42b8-a980-86a772f1b0ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.569445] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.569828] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.635789] env[63371]: DEBUG nova.compute.manager [req-c26f02a5-64f7-4d69-b920-8b3b8ad7bb3c req-23ccb5aa-5bcc-4fbe-b6d6-cbf6400d3e51 service nova] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Received event network-vif-plugged-8f925cfd-c5f7-4a4f-8782-bea15764877a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1566.636122] env[63371]: DEBUG oslo_concurrency.lockutils [req-c26f02a5-64f7-4d69-b920-8b3b8ad7bb3c req-23ccb5aa-5bcc-4fbe-b6d6-cbf6400d3e51 service nova] Acquiring lock "1cb18f2a-6476-4492-8576-7b0fd693a107-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.636458] env[63371]: DEBUG oslo_concurrency.lockutils [req-c26f02a5-64f7-4d69-b920-8b3b8ad7bb3c req-23ccb5aa-5bcc-4fbe-b6d6-cbf6400d3e51 service nova] Lock "1cb18f2a-6476-4492-8576-7b0fd693a107-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.636711] env[63371]: DEBUG oslo_concurrency.lockutils [req-c26f02a5-64f7-4d69-b920-8b3b8ad7bb3c req-23ccb5aa-5bcc-4fbe-b6d6-cbf6400d3e51 service nova] Lock "1cb18f2a-6476-4492-8576-7b0fd693a107-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.636997] env[63371]: DEBUG nova.compute.manager [req-c26f02a5-64f7-4d69-b920-8b3b8ad7bb3c 
req-23ccb5aa-5bcc-4fbe-b6d6-cbf6400d3e51 service nova] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] No waiting events found dispatching network-vif-plugged-8f925cfd-c5f7-4a4f-8782-bea15764877a {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1566.637316] env[63371]: WARNING nova.compute.manager [req-c26f02a5-64f7-4d69-b920-8b3b8ad7bb3c req-23ccb5aa-5bcc-4fbe-b6d6-cbf6400d3e51 service nova] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Received unexpected event network-vif-plugged-8f925cfd-c5f7-4a4f-8782-bea15764877a for instance with vm_state building and task_state spawning. [ 1566.660449] env[63371]: DEBUG nova.network.neutron [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Successfully updated port: 8f925cfd-c5f7-4a4f-8782-bea15764877a {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1566.670706] env[63371]: INFO nova.compute.manager [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Took 43.99 seconds to build instance. [ 1566.680801] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774207, 'name': CreateVM_Task, 'duration_secs': 0.500867} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.680801] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1566.681622] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.681752] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.682253] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1566.682681] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a1e5ade-7fb5-4562-b80e-09fa4c5f8ab9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.690151] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1566.690151] env[63371]: value 
= "session[52854284-8312-6a88-0b15-8c5a2a120aab]52969c81-22a5-1a26-2740-34df89b5cdcb" [ 1566.690151] env[63371]: _type = "Task" [ 1566.690151] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.700949] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52969c81-22a5-1a26-2740-34df89b5cdcb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.715611] env[63371]: DEBUG nova.compute.utils [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1566.720485] env[63371]: DEBUG nova.compute.manager [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1566.720691] env[63371]: DEBUG nova.network.neutron [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1566.793171] env[63371]: DEBUG nova.policy [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '749358a25ce1499f8fb06cd700a1d537', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd2fecd600f6447dab2440df8abfc83be', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1566.854800] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "1c93487b-6d8f-424d-8b95-10bfb894c609" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.854871] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "1c93487b-6d8f-424d-8b95-10bfb894c609" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.855043] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 
tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "1c93487b-6d8f-424d-8b95-10bfb894c609-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.855242] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "1c93487b-6d8f-424d-8b95-10bfb894c609-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1566.855440] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "1c93487b-6d8f-424d-8b95-10bfb894c609-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.860043] env[63371]: INFO nova.compute.manager [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Terminating instance [ 1566.863521] env[63371]: DEBUG nova.compute.manager [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1566.865809] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1566.865809] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9466a03-5ffc-4451-9cbd-7c48fb2ca6a2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.880608] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774208, 'name': ReconfigVM_Task, 'duration_secs': 0.413128} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.885617] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 9862b0f0-ccf6-4e69-9e78-cf864adaa65e/9862b0f0-ccf6-4e69-9e78-cf864adaa65e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1566.886576] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1566.887784] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2c8e8f84-e537-40e7-b202-05b5b99d7229 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.889083] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d9ccbff-9729-41e7-a9fa-34b0628492a2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.897589] env[63371]: DEBUG oslo_vmware.api [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1566.897589] env[63371]: value = "task-1774211" [ 1566.897589] env[63371]: _type = "Task" [ 1566.897589] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.907935] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1566.907935] env[63371]: value = "task-1774210" [ 1566.907935] env[63371]: _type = "Task" [ 1566.907935] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1566.916760] env[63371]: DEBUG oslo_vmware.api [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774211, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.921057] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774210, 'name': Rename_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1566.991774] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1566.992918] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquired lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1566.992918] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Forcefully refreshing network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1567.166025] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquiring lock "refresh_cache-1cb18f2a-6476-4492-8576-7b0fd693a107" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.166177] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquired lock "refresh_cache-1cb18f2a-6476-4492-8576-7b0fd693a107" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.166335] env[63371]: DEBUG nova.network.neutron [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1567.174385] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ee5d0a4-89d7-4abb-a8bd-35439c55e2ea tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lock "e05c7187-b4d6-481e-8bce-deb557dde6a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.270s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.207592] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52969c81-22a5-1a26-2740-34df89b5cdcb, 'name': SearchDatastore_Task, 'duration_secs': 0.011764} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.211020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.211020] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1567.211020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1567.211020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1567.211020] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1567.212227] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95540171-789b-49bf-a30a-c88aec7413f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.225185] env[63371]: DEBUG nova.compute.manager [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1567.232230] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1567.232886] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1567.237653] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dcdb55f6-bb81-472d-8822-44e732c7a7af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.240865] env[63371]: DEBUG nova.network.neutron [req-724c868a-c59f-41a0-a6aa-39ecd8836a49 req-eb09fc8c-66a2-4154-a9e0-8267438ed9f5 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updated VIF entry in instance network info cache for port bc8b891d-040a-4a55-a281-311c08ae828d. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1567.241345] env[63371]: DEBUG nova.network.neutron [req-724c868a-c59f-41a0-a6aa-39ecd8836a49 req-eb09fc8c-66a2-4154-a9e0-8267438ed9f5 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updating instance_info_cache with network_info: [{"id": "bc8b891d-040a-4a55-a281-311c08ae828d", "address": "fa:16:3e:ea:27:0c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8b891d-04", "ovs_interfaceid": "bc8b891d-040a-4a55-a281-311c08ae828d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.252068] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1567.252068] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c42ac1-979d-89d3-efdb-48e00b048e55" [ 1567.252068] env[63371]: _type = "Task" [ 1567.252068] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.262492] env[63371]: DEBUG nova.network.neutron [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Successfully created port: ca5ead57-035d-446f-8117-2c2374008be8 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1567.275777] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c42ac1-979d-89d3-efdb-48e00b048e55, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.308597] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ae217c-73bb-4b29-a3aa-386b6bda5b5a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.317685] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc961b8a-b4f6-4e09-9d39-06915705563b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.352649] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee77988-01c7-4ec7-a950-55e2e0579b76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.361742] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96173bf-1e70-447e-b651-0701ded92962 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.376679] env[63371]: DEBUG nova.compute.provider_tree [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1567.408165] env[63371]: DEBUG oslo_vmware.api [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774211, 'name': PowerOffVM_Task, 'duration_secs': 0.292078} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.408468] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1567.408639] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1567.408943] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce404289-6894-43ea-989b-e8f2c5e739a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.421315] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774210, 'name': Rename_Task, 'duration_secs': 0.195817} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.423388] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1567.423388] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4632bef0-a0b5-44d2-bf47-54e47c7b3ae8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.430149] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1567.430149] env[63371]: value = "task-1774213" [ 1567.430149] env[63371]: _type = "Task" [ 1567.430149] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.439593] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774213, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.453390] env[63371]: DEBUG oslo_concurrency.lockutils [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquiring lock "e05c7187-b4d6-481e-8bce-deb557dde6a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.453863] env[63371]: DEBUG oslo_concurrency.lockutils [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lock "e05c7187-b4d6-481e-8bce-deb557dde6a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.454214] env[63371]: DEBUG oslo_concurrency.lockutils [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquiring lock "e05c7187-b4d6-481e-8bce-deb557dde6a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.454587] env[63371]: DEBUG oslo_concurrency.lockutils [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lock "e05c7187-b4d6-481e-8bce-deb557dde6a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.456233] env[63371]: DEBUG oslo_concurrency.lockutils [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lock "e05c7187-b4d6-481e-8bce-deb557dde6a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.457847] env[63371]: INFO nova.compute.manager [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Terminating instance [ 1567.459445] env[63371]: DEBUG nova.compute.manager [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1567.459689] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1567.460572] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1642f98-c30b-42fd-9346-f2ef846df72c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.471370] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1567.471370] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84b22f8b-d13c-4ba9-b6a4-a18707145cbe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.476553] env[63371]: DEBUG oslo_vmware.api [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1567.476553] env[63371]: value = "task-1774214" [ 1567.476553] env[63371]: _type = "Task" [ 1567.476553] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.486699] env[63371]: DEBUG oslo_vmware.api [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774214, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.501399] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1567.501719] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1567.502549] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleting the datastore file [datastore1] 1c93487b-6d8f-424d-8b95-10bfb894c609 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1567.506023] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71d54401-00e5-417c-be8f-8da3f3684b00 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.509995] env[63371]: DEBUG oslo_vmware.api [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1567.509995] env[63371]: value = "task-1774215" [ 1567.509995] env[63371]: _type = "Task" [ 1567.509995] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.518990] env[63371]: DEBUG oslo_vmware.api [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774215, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.519985] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1567.677091] env[63371]: DEBUG nova.compute.manager [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1567.714634] env[63371]: DEBUG nova.network.neutron [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1567.733799] env[63371]: INFO nova.virt.block_device [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Booting with volume 32d41ea7-8d37-4108-a5fd-9dd5e6d351de at /dev/sda [ 1567.747052] env[63371]: DEBUG oslo_concurrency.lockutils [req-724c868a-c59f-41a0-a6aa-39ecd8836a49 req-eb09fc8c-66a2-4154-a9e0-8267438ed9f5 service nova] Releasing lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1567.766281] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c42ac1-979d-89d3-efdb-48e00b048e55, 'name': SearchDatastore_Task, 'duration_secs': 0.023011} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.767373] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92f99f0a-688c-48c4-8de3-8ab304e2f719 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.774194] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1567.774194] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526883ec-5543-48bb-2695-5103a916d026" [ 1567.774194] env[63371]: _type = "Task" [ 1567.774194] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.781016] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7e5b5812-df7b-407f-af63-c99b5ca06747 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.786460] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526883ec-5543-48bb-2695-5103a916d026, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.797238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d051d2-39a6-41b4-91e5-01bfb09dac07 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.840286] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-afec3c46-6850-4ba4-9e10-9e3fe150ad31 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.851367] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc55e0d-fe82-4526-9481-64cd963ad549 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.879767] env[63371]: DEBUG nova.scheduler.client.report [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1567.900115] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc2f576-288e-404e-a187-436bfebec9b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.909563] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3932362-2f05-4bf1-9662-a5e9d3528a47 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.927390] env[63371]: DEBUG nova.virt.block_device [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Updating existing volume attachment record: 3edb6ec3-a812-42a7-a42b-766c6e446ecb {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1567.945023] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774213, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.970677] env[63371]: DEBUG nova.network.neutron [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Updating instance_info_cache with network_info: [{"id": "8f925cfd-c5f7-4a4f-8782-bea15764877a", "address": "fa:16:3e:62:c5:65", "network": {"id": "c97f25eb-505d-4e0d-986f-8b94c6bebc7f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1979348833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51d56c0c34d04535bbac2f8255380c32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b49c5024-2ced-42ca-90cc-6066766d43e6", "external-id": "nsx-vlan-transportzone-239", "segmentation_id": 239, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f925cfd-c5", "ovs_interfaceid": "8f925cfd-c5f7-4a4f-8782-bea15764877a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1567.987781] env[63371]: DEBUG oslo_vmware.api [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774214, 'name': PowerOffVM_Task, 'duration_secs': 0.285738} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1567.988075] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1567.988250] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1567.988500] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c4d5de3-07e4-4f48-a306-6feb44da0199 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.020992] env[63371]: DEBUG oslo_vmware.api [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774215, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.282366} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.021372] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1568.021572] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1568.021748] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1568.021928] env[63371]: INFO nova.compute.manager [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1568.022286] env[63371]: DEBUG oslo.service.loopingcall [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1568.024282] env[63371]: DEBUG nova.compute.manager [-] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1568.024378] env[63371]: DEBUG nova.network.neutron [-] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1568.080424] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1568.080783] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1568.081135] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Deleting the datastore file [datastore1] e05c7187-b4d6-481e-8bce-deb557dde6a8 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1568.081550] env[63371]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-51fc591a-0842-434f-b39c-4b1726256c7f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.091480] env[63371]: DEBUG oslo_vmware.api [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for the task: (returnval){ [ 1568.091480] env[63371]: value = "task-1774217" [ 1568.091480] env[63371]: _type = "Task" [ 1568.091480] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.100496] env[63371]: DEBUG oslo_vmware.api [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774217, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.204103] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.212107] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.287670] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526883ec-5543-48bb-2695-5103a916d026, 'name': SearchDatastore_Task, 'duration_secs': 0.017148} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.288380] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1568.288652] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] d00602b9-16bf-4c11-bc47-6076dddbf159/d00602b9-16bf-4c11-bc47-6076dddbf159.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1568.289008] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-80bfef28-ae04-4e67-9f13-8982c0d5901c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.297795] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1568.297795] env[63371]: value = "task-1774218" [ 1568.297795] env[63371]: _type = "Task" [ 1568.297795] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.308879] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774218, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.402918] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.193s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1568.405504] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.482s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.407219] env[63371]: INFO nova.compute.claims [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1568.434226] env[63371]: INFO nova.scheduler.client.report [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Deleted allocations for instance 0e2c8ced-198f-43be-9d41-703a7c590df4 [ 1568.454875] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774213, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.475974] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Releasing lock "refresh_cache-1cb18f2a-6476-4492-8576-7b0fd693a107" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1568.475974] env[63371]: DEBUG nova.compute.manager [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Instance network_info: |[{"id": "8f925cfd-c5f7-4a4f-8782-bea15764877a", "address": "fa:16:3e:62:c5:65", "network": {"id": "c97f25eb-505d-4e0d-986f-8b94c6bebc7f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1979348833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51d56c0c34d04535bbac2f8255380c32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b49c5024-2ced-42ca-90cc-6066766d43e6", "external-id": "nsx-vlan-transportzone-239", "segmentation_id": 239, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f925cfd-c5", "ovs_interfaceid": "8f925cfd-c5f7-4a4f-8782-bea15764877a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1568.475974] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:c5:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b49c5024-2ced-42ca-90cc-6066766d43e6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f925cfd-c5f7-4a4f-8782-bea15764877a', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1568.483652] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Creating folder: Project (51d56c0c34d04535bbac2f8255380c32). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1568.487951] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a15ee61-006c-4031-83d6-524c38e0cd5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.497409] env[63371]: DEBUG nova.compute.manager [req-bb77b1e0-6d4a-4b37-895a-1684b5f3ff58 req-3a8dee7b-80c3-4f31-bcc4-9949c847f612 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Received event network-vif-deleted-f7958f5c-d0af-44e7-bbb2-e6fa265a6da3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1568.497632] env[63371]: INFO nova.compute.manager [req-bb77b1e0-6d4a-4b37-895a-1684b5f3ff58 req-3a8dee7b-80c3-4f31-bcc4-9949c847f612 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Neutron deleted interface f7958f5c-d0af-44e7-bbb2-e6fa265a6da3; detaching it from the instance and deleting it from the info cache [ 1568.497812] env[63371]: DEBUG nova.network.neutron [req-bb77b1e0-6d4a-4b37-895a-1684b5f3ff58 req-3a8dee7b-80c3-4f31-bcc4-9949c847f612 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.501934] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Created folder: Project (51d56c0c34d04535bbac2f8255380c32) in parent group-v368199. [ 1568.502151] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Creating folder: Instances. Parent ref: group-v368378. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1568.502404] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-090cd41a-2910-4589-8220-2357d7ef40bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.516150] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Created folder: Instances in parent group-v368378. [ 1568.516429] env[63371]: DEBUG oslo.service.loopingcall [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1568.516629] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1568.516846] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5d67c7a-5da5-415e-8ce9-025b5c37e38a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.540382] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1568.540382] env[63371]: value = "task-1774221" [ 1568.540382] env[63371]: _type = "Task" [ 1568.540382] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1568.550124] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774221, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.605825] env[63371]: DEBUG oslo_vmware.api [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Task: {'id': task-1774217, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24267} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.606171] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1568.606425] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1568.606635] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1568.606892] env[63371]: INFO nova.compute.manager [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1568.607152] env[63371]: DEBUG oslo.service.loopingcall [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1568.607397] env[63371]: DEBUG nova.compute.manager [-] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1568.607519] env[63371]: DEBUG nova.network.neutron [-] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1568.670791] env[63371]: DEBUG nova.compute.manager [req-2b17ebd9-3e89-49fb-a930-882f66fbe01c req-386158fe-e6d5-4f92-b0c7-ed7e1289d042 service nova] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Received event network-changed-8f925cfd-c5f7-4a4f-8782-bea15764877a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1568.671022] env[63371]: DEBUG nova.compute.manager [req-2b17ebd9-3e89-49fb-a930-882f66fbe01c req-386158fe-e6d5-4f92-b0c7-ed7e1289d042 service nova] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Refreshing instance network info cache due to event network-changed-8f925cfd-c5f7-4a4f-8782-bea15764877a. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1568.671869] env[63371]: DEBUG oslo_concurrency.lockutils [req-2b17ebd9-3e89-49fb-a930-882f66fbe01c req-386158fe-e6d5-4f92-b0c7-ed7e1289d042 service nova] Acquiring lock "refresh_cache-1cb18f2a-6476-4492-8576-7b0fd693a107" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1568.671869] env[63371]: DEBUG oslo_concurrency.lockutils [req-2b17ebd9-3e89-49fb-a930-882f66fbe01c req-386158fe-e6d5-4f92-b0c7-ed7e1289d042 service nova] Acquired lock "refresh_cache-1cb18f2a-6476-4492-8576-7b0fd693a107" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1568.672042] env[63371]: DEBUG nova.network.neutron [req-2b17ebd9-3e89-49fb-a930-882f66fbe01c req-386158fe-e6d5-4f92-b0c7-ed7e1289d042 service nova] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Refreshing network info cache for port 8f925cfd-c5f7-4a4f-8782-bea15764877a {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1568.714976] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Releasing lock "refresh_cache-47c1c242-d190-4523-8033-307c5a9b7535" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1568.715284] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Updated the network info_cache for instance {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1568.715937] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.716447] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.717202] env[63371]: DEBUG oslo_service.periodic_task [None 
req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.717514] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.718899] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.719315] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.719568] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1568.719919] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1568.809982] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774218, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.903467] env[63371]: DEBUG nova.network.neutron [-] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1568.949520] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774213, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1568.951851] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c782498d-71da-4001-b2e9-b316e8ff305b tempest-ServerAddressesTestJSON-1509524884 tempest-ServerAddressesTestJSON-1509524884-project-member] Lock "0e2c8ced-198f-43be-9d41-703a7c590df4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.139s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1569.002361] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a1aecac4-2c4b-498b-94de-67649a3461d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.015085] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f084a0-fe1f-431e-a66d-b9628d211ca8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.057074] env[63371]: DEBUG nova.compute.manager [req-bb77b1e0-6d4a-4b37-895a-1684b5f3ff58 req-3a8dee7b-80c3-4f31-bcc4-9949c847f612 service nova] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Detach interface failed, port_id=f7958f5c-d0af-44e7-bbb2-e6fa265a6da3, reason: Instance 1c93487b-6d8f-424d-8b95-10bfb894c609 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1569.069874] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774221, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.224435] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.258439] env[63371]: DEBUG nova.network.neutron [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Successfully updated port: ca5ead57-035d-446f-8117-2c2374008be8 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1569.312486] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774218, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.613648} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.312771] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] d00602b9-16bf-4c11-bc47-6076dddbf159/d00602b9-16bf-4c11-bc47-6076dddbf159.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1569.312982] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1569.313250] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3293554e-d107-43ec-b201-97087c3fa095 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.321246] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1569.321246] env[63371]: value = "task-1774222" [ 1569.321246] env[63371]: _type = "Task" [ 1569.321246] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.337351] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774222, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.380612] env[63371]: DEBUG nova.network.neutron [-] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.407593] env[63371]: INFO nova.compute.manager [-] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Took 1.38 seconds to deallocate network for instance. [ 1569.458497] env[63371]: DEBUG oslo_vmware.api [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774213, 'name': PowerOnVM_Task, 'duration_secs': 1.681609} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.460184] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1569.460613] env[63371]: INFO nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Took 13.12 seconds to spawn the instance on the hypervisor. [ 1569.460849] env[63371]: DEBUG nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1569.462235] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c829b3-d3fd-4a9d-902e-5ff968469e1f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.477959] env[63371]: DEBUG nova.network.neutron [req-2b17ebd9-3e89-49fb-a930-882f66fbe01c req-386158fe-e6d5-4f92-b0c7-ed7e1289d042 service nova] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Updated VIF entry in instance network info cache for port 8f925cfd-c5f7-4a4f-8782-bea15764877a. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1569.477959] env[63371]: DEBUG nova.network.neutron [req-2b17ebd9-3e89-49fb-a930-882f66fbe01c req-386158fe-e6d5-4f92-b0c7-ed7e1289d042 service nova] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Updating instance_info_cache with network_info: [{"id": "8f925cfd-c5f7-4a4f-8782-bea15764877a", "address": "fa:16:3e:62:c5:65", "network": {"id": "c97f25eb-505d-4e0d-986f-8b94c6bebc7f", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-1979348833-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "51d56c0c34d04535bbac2f8255380c32", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b49c5024-2ced-42ca-90cc-6066766d43e6", "external-id": "nsx-vlan-transportzone-239", "segmentation_id": 239, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f925cfd-c5", "ovs_interfaceid": "8f925cfd-c5f7-4a4f-8782-bea15764877a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.575129] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774221, 'name': CreateVM_Task, 'duration_secs': 0.768814} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.575712] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1569.576695] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.576989] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.577562] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1569.579250] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1f479c3-b974-4b6b-8ddf-491229d89954 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.586792] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Waiting for the task: (returnval){ [ 1569.586792] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b608b8-1865-2522-a2c6-3f773a2a9e96" [ 1569.586792] env[63371]: _type = "Task" [ 1569.586792] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.598589] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b608b8-1865-2522-a2c6-3f773a2a9e96, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.763918] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Acquiring lock "refresh_cache-c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.764197] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Acquired lock "refresh_cache-c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.764279] env[63371]: DEBUG nova.network.neutron [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1569.834784] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774222, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.132292} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.840023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1569.840023] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49819a35-c75a-485e-82d6-956a7b4ebd1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.867678] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] d00602b9-16bf-4c11-bc47-6076dddbf159/d00602b9-16bf-4c11-bc47-6076dddbf159.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1569.870844] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2560c795-a4b5-4bf7-91e2-37f65fde3139 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.889601] env[63371]: INFO nova.compute.manager [-] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Took 1.28 seconds to deallocate network for instance. 
[ 1569.907865] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1569.907865] env[63371]: value = "task-1774223" [ 1569.907865] env[63371]: _type = "Task" [ 1569.907865] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.914473] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.914766] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774223, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.978014] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f025a237-bcc4-4c76-ad23-c70c32fccb2f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.989288] env[63371]: DEBUG oslo_concurrency.lockutils [req-2b17ebd9-3e89-49fb-a930-882f66fbe01c req-386158fe-e6d5-4f92-b0c7-ed7e1289d042 service nova] Releasing lock "refresh_cache-1cb18f2a-6476-4492-8576-7b0fd693a107" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1569.993547] env[63371]: INFO nova.compute.manager [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Took 45.04 seconds to build instance. 
[ 1569.997890] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f8697d1-d9f0-4351-bb92-60314800dcc5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.036211] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26364d9e-a93b-4ac7-ad4f-474e0de7a755 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.047269] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1bf6202-c09e-4253-896a-838897e5de9d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.065474] env[63371]: DEBUG nova.compute.provider_tree [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1570.078508] env[63371]: DEBUG nova.compute.manager [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1570.079474] env[63371]: DEBUG nova.virt.hardware [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1570.079879] env[63371]: DEBUG nova.virt.hardware [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1570.080236] env[63371]: DEBUG nova.virt.hardware [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1570.080236] env[63371]: DEBUG nova.virt.hardware [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1570.080378] env[63371]: DEBUG nova.virt.hardware [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 
tempest-ServerActionsV293TestJSON-417627862-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1570.080546] env[63371]: DEBUG nova.virt.hardware [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1570.080966] env[63371]: DEBUG nova.virt.hardware [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1570.081256] env[63371]: DEBUG nova.virt.hardware [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1570.081475] env[63371]: DEBUG nova.virt.hardware [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1570.081986] env[63371]: DEBUG nova.virt.hardware [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1570.082261] env[63371]: DEBUG nova.virt.hardware [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1570.083904] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973479d0-c5a8-41de-b10a-9e0a5dd85073 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.097373] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b608b8-1865-2522-a2c6-3f773a2a9e96, 'name': SearchDatastore_Task, 'duration_secs': 0.017543} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.100151] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.100151] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1570.100351] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.100470] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1570.100640] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1570.100950] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e7c9263-b5e3-4bfb-adf4-ed6dc315c762 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.104417] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cf0530-11bb-4c0b-b9cd-ab4ef7cc3c1c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.120683] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1570.120873] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1570.121697] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d628b18-6806-4d8e-9a25-fb31e2077f20 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.128846] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Waiting for the task: (returnval){ [ 1570.128846] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52133862-6c97-b832-b018-3a0b9a1c8492" [ 1570.128846] env[63371]: _type = "Task" [ 1570.128846] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.139747] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52133862-6c97-b832-b018-3a0b9a1c8492, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.313015] env[63371]: DEBUG nova.network.neutron [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1570.400561] env[63371]: DEBUG oslo_concurrency.lockutils [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.421699] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774223, 'name': ReconfigVM_Task, 'duration_secs': 0.479482} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.422293] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Reconfigured VM instance instance-0000003e to attach disk [datastore1] d00602b9-16bf-4c11-bc47-6076dddbf159/d00602b9-16bf-4c11-bc47-6076dddbf159.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1570.422843] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0815c2aa-05f6-460b-a19e-d59066698ced {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.432692] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1570.432692] env[63371]: value = "task-1774224" [ 1570.432692] env[63371]: _type = "Task" [ 1570.432692] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.445220] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774224, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.499191] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fd14921f-0575-4b9f-a27a-5e551dd00b37 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.122s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.568624] env[63371]: DEBUG nova.scheduler.client.report [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1570.583235] env[63371]: DEBUG nova.compute.manager [req-62259854-fd30-4590-a013-4afd1188cb21 req-dde55239-e578-48cb-ac29-beb4c8eff006 service nova] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Received event network-vif-deleted-dbbac158-9444-441f-b15b-2a793507b64f {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1570.640154] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': 
session[52854284-8312-6a88-0b15-8c5a2a120aab]52133862-6c97-b832-b018-3a0b9a1c8492, 'name': SearchDatastore_Task, 'duration_secs': 0.017041} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.641311] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a09ba97-fa04-4770-b1cb-67a63fd0e7fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.648278] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Waiting for the task: (returnval){ [ 1570.648278] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52448f27-fa97-e0d2-7812-cf998e254693" [ 1570.648278] env[63371]: _type = "Task" [ 1570.648278] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.658293] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52448f27-fa97-e0d2-7812-cf998e254693, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.801398] env[63371]: DEBUG nova.network.neutron [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Updating instance_info_cache with network_info: [{"id": "ca5ead57-035d-446f-8117-2c2374008be8", "address": "fa:16:3e:27:7e:a3", "network": {"id": "85c5014e-02e5-457c-b241-aab48881a0a2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-266845225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2fecd600f6447dab2440df8abfc83be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec1528b-3e87-477b-8ab2-02696ad47e66", "external-id": "nsx-vlan-transportzone-180", "segmentation_id": 180, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca5ead57-03", "ovs_interfaceid": "ca5ead57-035d-446f-8117-2c2374008be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.923179] env[63371]: DEBUG nova.compute.manager [req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Received event network-vif-plugged-ca5ead57-035d-446f-8117-2c2374008be8 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1570.923179] env[63371]: DEBUG oslo_concurrency.lockutils 
[req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] Acquiring lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.924242] env[63371]: DEBUG oslo_concurrency.lockutils [req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] Lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.924605] env[63371]: DEBUG oslo_concurrency.lockutils [req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] Lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1570.924930] env[63371]: DEBUG nova.compute.manager [req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] No waiting events found dispatching network-vif-plugged-ca5ead57-035d-446f-8117-2c2374008be8 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1570.925357] env[63371]: WARNING nova.compute.manager [req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Received unexpected event network-vif-plugged-ca5ead57-035d-446f-8117-2c2374008be8 for instance with vm_state building and task_state spawning. [ 1570.926096] env[63371]: DEBUG nova.compute.manager [req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Received event network-changed-ca5ead57-035d-446f-8117-2c2374008be8 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1570.926096] env[63371]: DEBUG nova.compute.manager [req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Refreshing instance network info cache due to event network-changed-ca5ead57-035d-446f-8117-2c2374008be8. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1570.926096] env[63371]: DEBUG oslo_concurrency.lockutils [req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] Acquiring lock "refresh_cache-c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1570.945020] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774224, 'name': Rename_Task, 'duration_secs': 0.361463} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1570.945337] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1570.945629] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfba36d4-b98d-43b7-8d65-0dde52308df2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1570.954220] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1570.954220] env[63371]: value = "task-1774225" [ 1570.954220] env[63371]: _type = "Task" [ 1570.954220] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1570.963812] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774225, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.080084] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.675s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.080627] env[63371]: DEBUG nova.compute.manager [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1571.084055] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.826s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.084055] env[63371]: DEBUG nova.objects.instance [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63371) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1571.165970] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52448f27-fa97-e0d2-7812-cf998e254693, 'name': SearchDatastore_Task, 'duration_secs': 0.022802} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.166117] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.166547] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1cb18f2a-6476-4492-8576-7b0fd693a107/1cb18f2a-6476-4492-8576-7b0fd693a107.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1571.166680] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd8eb294-e11f-4a6a-9891-ec21b8be0467 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.178030] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Waiting for the task: (returnval){ [ 1571.178030] env[63371]: value = "task-1774226" [ 1571.178030] env[63371]: _type = "Task" [ 1571.178030] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.188505] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774226, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.306767] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Releasing lock "refresh_cache-c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.307132] env[63371]: DEBUG nova.compute.manager [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Instance network_info: |[{"id": "ca5ead57-035d-446f-8117-2c2374008be8", "address": "fa:16:3e:27:7e:a3", "network": {"id": "85c5014e-02e5-457c-b241-aab48881a0a2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-266845225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2fecd600f6447dab2440df8abfc83be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec1528b-3e87-477b-8ab2-02696ad47e66", "external-id": "nsx-vlan-transportzone-180", "segmentation_id": 180, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca5ead57-03", "ovs_interfaceid": "ca5ead57-035d-446f-8117-2c2374008be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1571.307445] env[63371]: DEBUG oslo_concurrency.lockutils [req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] Acquired lock "refresh_cache-c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.307819] env[63371]: DEBUG nova.network.neutron [req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Refreshing network info cache for port ca5ead57-035d-446f-8117-2c2374008be8 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1571.309421] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:7e:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bec1528b-3e87-477b-8ab2-02696ad47e66', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca5ead57-035d-446f-8117-2c2374008be8', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1571.319041] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 
tempest-ServerActionsV293TestJSON-417627862-project-member] Creating folder: Project (d2fecd600f6447dab2440df8abfc83be). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1571.322752] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d410c424-1a7b-4d26-976a-0149d214cc91 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.342569] env[63371]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1571.342748] env[63371]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63371) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1571.343185] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Folder already exists: Project (d2fecd600f6447dab2440df8abfc83be). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1571.343431] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Creating folder: Instances. Parent ref: group-v368340. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1571.343729] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f6a26f4-d877-4284-bd19-1d65397aeb3a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.359428] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Created folder: Instances in parent group-v368340. [ 1571.360256] env[63371]: DEBUG oslo.service.loopingcall [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1571.360256] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1571.360414] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c66ba82e-0707-4e91-9881-64a0352371f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.393021] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1571.393021] env[63371]: value = "task-1774229" [ 1571.393021] env[63371]: _type = "Task" [ 1571.393021] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.406637] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774229, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.466091] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774225, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.590178] env[63371]: DEBUG nova.compute.utils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1571.599254] env[63371]: DEBUG nova.compute.manager [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1571.599254] env[63371]: DEBUG nova.network.neutron [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1571.689963] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774226, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.697921] env[63371]: DEBUG nova.policy [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c6416719728485f8dd45eea9e39fdc5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58f967d3770541269fb89f48b3df58c9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1571.733685] env[63371]: DEBUG nova.network.neutron [req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Updated VIF entry in instance network info cache for port ca5ead57-035d-446f-8117-2c2374008be8. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1571.734559] env[63371]: DEBUG nova.network.neutron [req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Updating instance_info_cache with network_info: [{"id": "ca5ead57-035d-446f-8117-2c2374008be8", "address": "fa:16:3e:27:7e:a3", "network": {"id": "85c5014e-02e5-457c-b241-aab48881a0a2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-266845225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2fecd600f6447dab2440df8abfc83be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec1528b-3e87-477b-8ab2-02696ad47e66", "external-id": "nsx-vlan-transportzone-180", "segmentation_id": 180, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca5ead57-03", "ovs_interfaceid": "ca5ead57-035d-446f-8117-2c2374008be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1571.908098] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774229, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.970737] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774225, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.100025] env[63371]: DEBUG nova.compute.manager [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1572.110054] env[63371]: DEBUG oslo_concurrency.lockutils [None req-836dfb9a-5abe-4a57-9d72-8b07982683a9 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.110054] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.753s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.111729] env[63371]: INFO nova.compute.claims [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1572.136979] env[63371]: DEBUG nova.network.neutron [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Successfully created port: 0ea63013-3179-492e-89dd-074b2ed530c0 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1572.190049] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774226, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.735307} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.190352] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1cb18f2a-6476-4492-8576-7b0fd693a107/1cb18f2a-6476-4492-8576-7b0fd693a107.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1572.190772] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1572.190861] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79154b5c-798b-4a1f-8edc-9738f9162ad6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.200549] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Waiting for the task: (returnval){ [ 1572.200549] env[63371]: value = "task-1774230" [ 1572.200549] env[63371]: _type = "Task" [ 1572.200549] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.211329] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774230, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.237729] env[63371]: DEBUG oslo_concurrency.lockutils [req-204fd364-16f4-43e1-915d-cec9813b1010 req-f6c093d8-cace-4c4d-a89d-df7d4c64bff3 service nova] Releasing lock "refresh_cache-c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.404673] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774229, 'name': CreateVM_Task, 'duration_secs': 0.822617} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.404881] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1572.407565] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'disk_bus': None, 'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368346', 'volume_id': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'name': 'volume-32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c03e2dc4-75d9-4fbb-afc8-046cbbf908ac', 'attached_at': '', 'detached_at': '', 'volume_id': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'serial': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de'}, 'boot_index': 0, 'device_type': None, 'attachment_id': '3edb6ec3-a812-42a7-a42b-766c6e446ecb', 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=63371) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1572.407565] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Root volume attach. Driver type: vmdk {{(pid=63371) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1572.411619] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ac7d24-82a3-4a6a-a100-7c3d87615f31 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.418895] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c882e95d-97ad-470c-af22-58447a5f33be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.428057] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dab825f6-e5fa-4b32-844a-95b7c96639bc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.438073] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-c77216b9-a443-4cf6-92f8-e5d13852b1f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.449986] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Waiting for the task: (returnval){ [ 1572.449986] env[63371]: value = "task-1774231" [ 1572.449986] env[63371]: _type = "Task" [ 1572.449986] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.470762] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774231, 'name': RelocateVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.476264] env[63371]: DEBUG oslo_vmware.api [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774225, 'name': PowerOnVM_Task, 'duration_secs': 1.053807} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.477590] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1572.477590] env[63371]: INFO nova.compute.manager [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Took 9.50 seconds to spawn the instance on the hypervisor. [ 1572.477822] env[63371]: DEBUG nova.compute.manager [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1572.479174] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf3f044b-c260-48b4-83ed-327cb401dab9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.716027] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774230, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.182651} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.716027] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1572.716027] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f3a8def-d035-4da9-b20d-f3b520e683dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.742515] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 1cb18f2a-6476-4492-8576-7b0fd693a107/1cb18f2a-6476-4492-8576-7b0fd693a107.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1572.743111] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-423cf4e3-9d99-4628-9aa0-4fa2beeb16bc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.766727] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Waiting for the task: (returnval){ [ 1572.766727] env[63371]: value = "task-1774232" [ 1572.766727] env[63371]: _type = "Task" [ 1572.766727] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.778814] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774232, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.962945] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774231, 'name': RelocateVM_Task} progress is 20%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.992046] env[63371]: DEBUG nova.compute.manager [req-ff7b2c71-3edc-4d78-9ddc-093a79a07e21 req-8e3ceb1f-c609-4293-8ed7-d21708dc0f3a service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Received event network-changed-82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1572.992814] env[63371]: DEBUG nova.compute.manager [req-ff7b2c71-3edc-4d78-9ddc-093a79a07e21 req-8e3ceb1f-c609-4293-8ed7-d21708dc0f3a service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Refreshing instance network info cache due to event network-changed-82aece5e-dc40-4c18-a1a9-4b4e859fef2a. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1572.993103] env[63371]: DEBUG oslo_concurrency.lockutils [req-ff7b2c71-3edc-4d78-9ddc-093a79a07e21 req-8e3ceb1f-c609-4293-8ed7-d21708dc0f3a service nova] Acquiring lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.993257] env[63371]: DEBUG oslo_concurrency.lockutils [req-ff7b2c71-3edc-4d78-9ddc-093a79a07e21 req-8e3ceb1f-c609-4293-8ed7-d21708dc0f3a service nova] Acquired lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.993554] env[63371]: DEBUG nova.network.neutron [req-ff7b2c71-3edc-4d78-9ddc-093a79a07e21 req-8e3ceb1f-c609-4293-8ed7-d21708dc0f3a service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Refreshing network info cache for port 82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1573.055318] env[63371]: INFO nova.compute.manager [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Took 41.11 seconds to build instance. [ 1573.121197] env[63371]: DEBUG nova.compute.manager [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1573.160394] env[63371]: DEBUG nova.virt.hardware [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1573.160763] env[63371]: DEBUG nova.virt.hardware [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1573.160991] env[63371]: DEBUG nova.virt.hardware [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1573.161321] env[63371]: DEBUG nova.virt.hardware [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 
tempest-ServersTestJSON-1162814863-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1573.161555] env[63371]: DEBUG nova.virt.hardware [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1573.161795] env[63371]: DEBUG nova.virt.hardware [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1573.162125] env[63371]: DEBUG nova.virt.hardware [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1573.162433] env[63371]: DEBUG nova.virt.hardware [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1573.162627] env[63371]: DEBUG nova.virt.hardware [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1573.164432] env[63371]: DEBUG nova.virt.hardware [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1573.164940] env[63371]: DEBUG nova.virt.hardware [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1573.166226] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a2a211-2118-4c66-b8ff-866839420f4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.190945] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8034a30-57c9-4a2c-b983-da09ee2215e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.288649] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774232, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.465744] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774231, 'name': RelocateVM_Task, 'duration_secs': 0.658699} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.471542] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Volume attach. Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1573.471924] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368346', 'volume_id': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'name': 'volume-32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c03e2dc4-75d9-4fbb-afc8-046cbbf908ac', 'attached_at': '', 'detached_at': '', 'volume_id': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'serial': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1573.473807] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cca589a-8c96-4f5e-84b0-56a7de99ef10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.495156] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43032509-9344-43a7-acbe-cae38722c33d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.523539] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] volume-32d41ea7-8d37-4108-a5fd-9dd5e6d351de/volume-32d41ea7-8d37-4108-a5fd-9dd5e6d351de.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1573.527058] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-380fec05-8a80-4e55-bde4-060b4602e1ad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.549872] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Waiting for the task: (returnval){ [ 1573.549872] env[63371]: value = "task-1774233" [ 1573.549872] env[63371]: _type = "Task" [ 1573.549872] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.560989] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a00a0fd-16f2-4a1d-a380-8de2cea5bd63 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "d00602b9-16bf-4c11-bc47-6076dddbf159" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.104s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1573.561386] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774233, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.776131] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1cbe07-1ce6-4aec-a259-1faed4c04815 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.789568] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774232, 'name': ReconfigVM_Task, 'duration_secs': 0.705169} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.795485] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 1cb18f2a-6476-4492-8576-7b0fd693a107/1cb18f2a-6476-4492-8576-7b0fd693a107.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1573.796750] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98485cb2-3479-4884-a065-53d788789296 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.800512] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e6004e-fa41-41ed-b98e-97e1347c9d7b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.861839] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fb42aa-fdc0-434e-929e-f68c846dfacc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.865263] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Waiting for the task: (returnval){ [ 1573.865263] env[63371]: value = "task-1774234" [ 1573.865263] env[63371]: _type = "Task" [ 1573.865263] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.878274] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095d9f9e-9d72-42b7-ae46-9e5a5afe6edd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.890263] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774234, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.905290] env[63371]: DEBUG nova.compute.provider_tree [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1574.062416] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774233, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.159393] env[63371]: DEBUG nova.network.neutron [req-ff7b2c71-3edc-4d78-9ddc-093a79a07e21 req-8e3ceb1f-c609-4293-8ed7-d21708dc0f3a service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updated VIF entry in instance network info cache for port 82aece5e-dc40-4c18-a1a9-4b4e859fef2a. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1574.159768] env[63371]: DEBUG nova.network.neutron [req-ff7b2c71-3edc-4d78-9ddc-093a79a07e21 req-8e3ceb1f-c609-4293-8ed7-d21708dc0f3a service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance_info_cache with network_info: [{"id": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "address": "fa:16:3e:50:09:23", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82aece5e-dc", "ovs_interfaceid": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1574.382790] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774234, 'name': Rename_Task, 'duration_secs': 0.332349} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.383163] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1574.383525] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f954ea3-8a2a-4a05-8db5-04d12b68d254 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.395669] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Waiting for the task: (returnval){ [ 1574.395669] env[63371]: value = "task-1774235" [ 1574.395669] env[63371]: _type = "Task" [ 1574.395669] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.409908] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774235, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.409908] env[63371]: DEBUG nova.scheduler.client.report [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1574.425826] env[63371]: DEBUG nova.network.neutron [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Successfully updated port: 0ea63013-3179-492e-89dd-074b2ed530c0 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1574.563785] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774233, 'name': ReconfigVM_Task, 'duration_secs': 0.693596} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.564120] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Reconfigured VM instance instance-00000040 to attach disk [datastore1] volume-32d41ea7-8d37-4108-a5fd-9dd5e6d351de/volume-32d41ea7-8d37-4108-a5fd-9dd5e6d351de.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1574.571212] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-760b76c3-db03-410b-b550-8198b9f337d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.599334] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Waiting for the task: (returnval){ [ 1574.599334] env[63371]: value = "task-1774236" [ 1574.599334] env[63371]: _type = "Task" [ 1574.599334] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.623407] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774236, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.667054] env[63371]: DEBUG oslo_concurrency.lockutils [req-ff7b2c71-3edc-4d78-9ddc-093a79a07e21 req-8e3ceb1f-c609-4293-8ed7-d21708dc0f3a service nova] Releasing lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.907945] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774235, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.917271] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.808s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.918319] env[63371]: DEBUG nova.compute.manager [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1574.921568] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.063s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.923560] env[63371]: INFO nova.compute.claims [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1574.932023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "refresh_cache-0c9156ea-81c4-4286-a20b-66068a5bce59" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.932023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "refresh_cache-0c9156ea-81c4-4286-a20b-66068a5bce59" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.932023] env[63371]: DEBUG nova.network.neutron [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1575.111841] env[63371]: DEBUG nova.compute.manager 
[req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Received event network-vif-plugged-0ea63013-3179-492e-89dd-074b2ed530c0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1575.111841] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] Acquiring lock "0c9156ea-81c4-4286-a20b-66068a5bce59-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.112173] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] Lock "0c9156ea-81c4-4286-a20b-66068a5bce59-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.112403] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] Lock "0c9156ea-81c4-4286-a20b-66068a5bce59-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1575.113647] env[63371]: DEBUG nova.compute.manager [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] No waiting events found dispatching network-vif-plugged-0ea63013-3179-492e-89dd-074b2ed530c0 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1575.113866] env[63371]: WARNING nova.compute.manager [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Received unexpected event network-vif-plugged-0ea63013-3179-492e-89dd-074b2ed530c0 for instance with vm_state building and task_state spawning. [ 1575.114633] env[63371]: DEBUG nova.compute.manager [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Received event network-changed-0ea63013-3179-492e-89dd-074b2ed530c0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1575.114833] env[63371]: DEBUG nova.compute.manager [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Refreshing instance network info cache due to event network-changed-0ea63013-3179-492e-89dd-074b2ed530c0. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1575.115096] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] Acquiring lock "refresh_cache-0c9156ea-81c4-4286-a20b-66068a5bce59" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.121083] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774236, 'name': ReconfigVM_Task, 'duration_secs': 0.186562} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.121914] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368346', 'volume_id': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'name': 'volume-32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c03e2dc4-75d9-4fbb-afc8-046cbbf908ac', 'attached_at': '', 'detached_at': '', 'volume_id': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'serial': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1575.122648] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-416ee06b-2259-4fe1-9700-d899b63b5cbc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.133258] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Waiting for the task: (returnval){ [ 1575.133258] env[63371]: value = "task-1774237" [ 1575.133258] env[63371]: _type = "Task" [ 1575.133258] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.145573] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774237, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.412833] env[63371]: DEBUG oslo_vmware.api [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774235, 'name': PowerOnVM_Task, 'duration_secs': 0.752861} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.413274] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1575.414080] env[63371]: INFO nova.compute.manager [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Took 9.86 seconds to spawn the instance on the hypervisor. [ 1575.414696] env[63371]: DEBUG nova.compute.manager [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1575.415870] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ad5703-79c5-4d02-89a8-1c82953ba087 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.432827] env[63371]: DEBUG nova.compute.utils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1575.436548] env[63371]: DEBUG nova.compute.manager [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1575.436877] env[63371]: DEBUG nova.network.neutron [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1575.485183] env[63371]: DEBUG nova.network.neutron [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1575.512414] env[63371]: DEBUG nova.policy [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62e2815d3adb44fc8f90e607df9913de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22cee8ba9bdd42a1bb39518839dbd437', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1575.646651] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774237, 'name': Rename_Task, 'duration_secs': 0.182934} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.646918] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1575.647194] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27e7b045-56c1-4dcb-85fb-f00dabae8dba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.655759] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Waiting for the task: (returnval){ [ 1575.655759] env[63371]: value = "task-1774238" [ 1575.655759] env[63371]: _type = "Task" [ 1575.655759] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.665868] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774238, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.691765] env[63371]: DEBUG nova.network.neutron [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Updating instance_info_cache with network_info: [{"id": "0ea63013-3179-492e-89dd-074b2ed530c0", "address": "fa:16:3e:29:98:ce", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ea63013-31", "ovs_interfaceid": "0ea63013-3179-492e-89dd-074b2ed530c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1575.865753] env[63371]: DEBUG nova.network.neutron [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Successfully created port: 0afc5c03-432d-430e-aadd-25d2ee234367 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1575.923609] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "9985dbcd-4498-4629-aae5-5e1933307c50" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.923609] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "9985dbcd-4498-4629-aae5-5e1933307c50" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.946327] env[63371]: DEBUG nova.compute.manager [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1575.950760] env[63371]: INFO nova.compute.manager [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Took 43.84 seconds to build instance. [ 1576.176932] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774238, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.196035] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "refresh_cache-0c9156ea-81c4-4286-a20b-66068a5bce59" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.196035] env[63371]: DEBUG nova.compute.manager [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Instance network_info: |[{"id": "0ea63013-3179-492e-89dd-074b2ed530c0", "address": "fa:16:3e:29:98:ce", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ea63013-31", "ovs_interfaceid": "0ea63013-3179-492e-89dd-074b2ed530c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1576.196035] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] Acquired lock "refresh_cache-0c9156ea-81c4-4286-a20b-66068a5bce59" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.196035] env[63371]: DEBUG nova.network.neutron [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Refreshing network info cache for port 0ea63013-3179-492e-89dd-074b2ed530c0 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1576.196035] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Instance 
VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:98:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ea63013-3179-492e-89dd-074b2ed530c0', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1576.206473] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Creating folder: Project (58f967d3770541269fb89f48b3df58c9). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1576.209288] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1c44e84-c985-4cc5-b569-77f19cd160d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.230022] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Created folder: Project (58f967d3770541269fb89f48b3df58c9) in parent group-v368199. [ 1576.231127] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Creating folder: Instances. Parent ref: group-v368383. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1576.231127] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2942542-9cc0-4a7e-ba41-ccea18f9a9ad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.244470] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Created folder: Instances in parent group-v368383. [ 1576.244923] env[63371]: DEBUG oslo.service.loopingcall [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1576.245672] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1576.246118] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5c3fadd1-3b63-410f-a69b-986ed82b8e0e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.275590] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1576.275590] env[63371]: value = "task-1774241" [ 1576.275590] env[63371]: _type = "Task" [ 1576.275590] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.291420] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774241, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.427042] env[63371]: DEBUG nova.compute.manager [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1576.447213] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a23b97a8-b2a3-43bb-97af-49d8d6938733 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.458624] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eed05ef7-a256-4e03-96b3-c6a8532474bf tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Lock "1cb18f2a-6476-4492-8576-7b0fd693a107" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.107s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1576.459395] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2e1c06-4056-4045-aa87-820dd91b7745 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.494121] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee7b9a5e-fffd-4636-bc37-984655a3f5fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.504040] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9565f4-dc0a-4cad-a325-a1e528049dfc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.518946] env[63371]: DEBUG nova.compute.provider_tree [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1576.669503] env[63371]: DEBUG oslo_vmware.api [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774238, 'name': PowerOnVM_Task, 'duration_secs': 0.630307} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.670591] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1576.670591] env[63371]: INFO nova.compute.manager [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Took 6.59 seconds to spawn the instance on the hypervisor. 
[ 1576.670591] env[63371]: DEBUG nova.compute.manager [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1576.670951] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45fa7200-6c9e-448c-8953-c75988120bc1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.788639] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774241, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.791065] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c5df23-4743-0bad-3ca0-06573eb5a5f4/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1576.792342] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35f836d-cb74-4b3b-bd23-5f8d5de810fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.798765] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c5df23-4743-0bad-3ca0-06573eb5a5f4/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1576.798897] env[63371]: ERROR oslo_vmware.rw_handles [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c5df23-4743-0bad-3ca0-06573eb5a5f4/disk-0.vmdk due to incomplete transfer. [ 1576.799257] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e319360c-b512-4108-a81d-2b5ee2d2e467 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.807755] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c5df23-4743-0bad-3ca0-06573eb5a5f4/disk-0.vmdk. 
{{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1576.808091] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Uploaded image 17c4a4a7-e61d-4eb4-a2f8-2fc20bcfe68b to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1576.810338] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1576.810587] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-68cb713a-116a-40da-bf17-962142c2c202 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.819364] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1576.819364] env[63371]: value = "task-1774242" [ 1576.819364] env[63371]: _type = "Task" [ 1576.819364] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.828867] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774242, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.954310] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1576.961257] env[63371]: DEBUG nova.compute.manager [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1576.989749] env[63371]: DEBUG nova.virt.hardware [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1576.989955] env[63371]: DEBUG nova.virt.hardware [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1576.990154] env[63371]: DEBUG nova.virt.hardware [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1576.990351] env[63371]: DEBUG nova.virt.hardware [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1576.990497] env[63371]: DEBUG nova.virt.hardware [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1576.990926] env[63371]: DEBUG nova.virt.hardware [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1576.990926] env[63371]: DEBUG nova.virt.hardware [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1576.991035] env[63371]: DEBUG nova.virt.hardware [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1576.991143] env[63371]: DEBUG nova.virt.hardware [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1576.991305] env[63371]: DEBUG nova.virt.hardware [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1576.991471] env[63371]: DEBUG nova.virt.hardware [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1576.992347] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404857ef-e528-45ed-bd3a-5631eff745e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.995507] env[63371]: DEBUG nova.network.neutron [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Updated VIF entry in instance network info cache for port 0ea63013-3179-492e-89dd-074b2ed530c0. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1576.995959] env[63371]: DEBUG nova.network.neutron [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Updating instance_info_cache with network_info: [{"id": "0ea63013-3179-492e-89dd-074b2ed530c0", "address": "fa:16:3e:29:98:ce", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ea63013-31", "ovs_interfaceid": "0ea63013-3179-492e-89dd-074b2ed530c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.003111] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72e0b59-ecb7-4dbd-8c8f-10065cc012f5 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.021986] env[63371]: DEBUG nova.scheduler.client.report [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1577.197142] env[63371]: INFO nova.compute.manager [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Took 44.34 seconds to build instance. [ 1577.287436] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774241, 'name': CreateVM_Task, 'duration_secs': 0.94315} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.287436] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1577.288340] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.288340] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.288618] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1577.288877] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fdd49172-bf70-43c1-9074-e0466aca66cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.294973] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1577.294973] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52058946-9843-5794-86f3-70cb3e8ea5ce" [ 1577.294973] env[63371]: _type = "Task" [ 1577.294973] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.303525] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52058946-9843-5794-86f3-70cb3e8ea5ce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.328734] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774242, 'name': Destroy_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.498609] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] Releasing lock "refresh_cache-0c9156ea-81c4-4286-a20b-66068a5bce59" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.498847] env[63371]: DEBUG nova.compute.manager [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Received event network-changed-bc8b891d-040a-4a55-a281-311c08ae828d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1577.499024] env[63371]: DEBUG nova.compute.manager [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Refreshing instance network info cache due to event network-changed-bc8b891d-040a-4a55-a281-311c08ae828d. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1577.499252] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] Acquiring lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.499387] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] Acquired lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.499547] env[63371]: DEBUG nova.network.neutron [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Refreshing network info cache for port bc8b891d-040a-4a55-a281-311c08ae828d {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1577.526653] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.606s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.527233] env[63371]: DEBUG nova.compute.manager [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1577.529753] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.720s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.529970] env[63371]: DEBUG nova.objects.instance [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lazy-loading 'resources' on Instance uuid d6bc618e-33c9-4b45-b79f-afe6811acd4e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1577.569484] env[63371]: DEBUG nova.network.neutron [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Successfully updated port: 0afc5c03-432d-430e-aadd-25d2ee234367 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1577.618752] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.619029] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.619321] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.619529] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.619702] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.621873] env[63371]: INFO nova.compute.manager 
[None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Terminating instance [ 1577.623634] env[63371]: DEBUG nova.compute.manager [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1577.623828] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1577.624710] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6018ff0-79ba-44f2-a23b-1e0e42909a20 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.633198] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1577.633802] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d99c0c6-33bb-4dac-a60e-2e4dfc11cf22 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.668064] env[63371]: DEBUG nova.compute.manager [req-162ae2b6-324c-477f-883e-f85f5a5fd154 req-c17ed305-b4a1-497f-8f43-d5d15bf427e2 service nova] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Received event network-vif-plugged-0afc5c03-432d-430e-aadd-25d2ee234367 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1577.669022] env[63371]: DEBUG oslo_concurrency.lockutils [req-162ae2b6-324c-477f-883e-f85f5a5fd154 req-c17ed305-b4a1-497f-8f43-d5d15bf427e2 service nova] Acquiring lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.669169] env[63371]: DEBUG oslo_concurrency.lockutils [req-162ae2b6-324c-477f-883e-f85f5a5fd154 req-c17ed305-b4a1-497f-8f43-d5d15bf427e2 service nova] Lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.670210] env[63371]: DEBUG oslo_concurrency.lockutils [req-162ae2b6-324c-477f-883e-f85f5a5fd154 req-c17ed305-b4a1-497f-8f43-d5d15bf427e2 service nova] Lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.670210] env[63371]: DEBUG nova.compute.manager [req-162ae2b6-324c-477f-883e-f85f5a5fd154 req-c17ed305-b4a1-497f-8f43-d5d15bf427e2 service nova] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] 
No waiting events found dispatching network-vif-plugged-0afc5c03-432d-430e-aadd-25d2ee234367 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1577.670210] env[63371]: WARNING nova.compute.manager [req-162ae2b6-324c-477f-883e-f85f5a5fd154 req-c17ed305-b4a1-497f-8f43-d5d15bf427e2 service nova] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Received unexpected event network-vif-plugged-0afc5c03-432d-430e-aadd-25d2ee234367 for instance with vm_state building and task_state spawning. [ 1577.699453] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784f6a35-7a7f-45db-9532-74d7b022f5c3 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.663s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1577.716549] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1577.716824] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1577.717100] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleting the datastore file [datastore1] 704978f9-3b24-4a73-8f64-b8e3e9e94a04 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1577.717440] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce7fe342-c9ed-4988-b6c2-447274b86335 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.727240] env[63371]: DEBUG oslo_vmware.api [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1577.727240] env[63371]: value = "task-1774244" [ 1577.727240] env[63371]: _type = "Task" [ 1577.727240] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.739112] env[63371]: DEBUG oslo_vmware.api [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774244, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.810040] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52058946-9843-5794-86f3-70cb3e8ea5ce, 'name': SearchDatastore_Task, 'duration_secs': 0.041358} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.810404] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.810668] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1577.810919] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.811105] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.811336] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1577.811635] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec81c33d-c91e-4666-8371-fc18cc757a8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.824858] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1577.824975] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1577.826213] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fe5d262-d23e-43ef-a6a3-b7e3583664e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.832410] env[63371]: DEBUG oslo_vmware.api [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774242, 'name': Destroy_Task, 'duration_secs': 0.957609} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.832773] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Destroyed the VM [ 1577.833020] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1577.833285] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-89f7a25b-5344-423d-a112-f00cf80432dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.836817] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1577.836817] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52e17e95-09c5-ea4e-8524-b7584e0a5b39" [ 1577.836817] env[63371]: _type = "Task" [ 1577.836817] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.841270] env[63371]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1577.841484] env[63371]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=63371) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1577.842337] env[63371]: DEBUG nova.compute.utils [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Cleaning up image 17c4a4a7-e61d-4eb4-a2f8-2fc20bcfe68b {{(pid=63371) delete_image /opt/stack/nova/nova/compute/utils.py:1322}} [ 1577.851218] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e17e95-09c5-ea4e-8524-b7584e0a5b39, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.035447] env[63371]: DEBUG nova.compute.utils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1578.041854] env[63371]: DEBUG nova.compute.manager [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1578.041854] env[63371]: DEBUG nova.network.neutron [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1578.074015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquiring lock "refresh_cache-1276e001-fb07-4367-8b03-81c5fe5fbd0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.074185] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquired lock "refresh_cache-1276e001-fb07-4367-8b03-81c5fe5fbd0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.074336] env[63371]: DEBUG nova.network.neutron [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1578.134443] env[63371]: DEBUG nova.policy [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31b76ca90f31495287b332ebb3001dff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e96348bcfea1455dad72945c7c36f027', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1578.243975] env[63371]: WARNING nova.virt.vmwareapi.vmops [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] In vmwareapi:vmops:_destroy_instance, exception while deleting the VM contents from the disk: oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore1] 704978f9-3b24-4a73-8f64-b8e3e9e94a04 [ 1578.243975] 
env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Traceback (most recent call last): [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1113, in _destroy_instance [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] ds_util.file_delete(self._session, [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] File "/opt/stack/nova/nova/virt/vmwareapi/ds_util.py", line 219, in file_delete [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] session._wait_for_task(file_delete_task) [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] return self.wait_for_task(task_ref) [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] return evt.wait() [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] result = hub.switch() [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] return self.greenlet.switch() [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] self.f(*self.args, **self.kw) [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] raise exceptions.translate_fault(task_info.error) [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] oslo_vmware.exceptions.CannotDeleteFileException: Cannot delete file [datastore1] 704978f9-3b24-4a73-8f64-b8e3e9e94a04 [ 1578.243975] env[63371]: ERROR nova.virt.vmwareapi.vmops [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] [ 1578.244739] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 
1578.244739] env[63371]: INFO nova.compute.manager [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1578.244987] env[63371]: DEBUG oslo.service.loopingcall [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1578.247613] env[63371]: DEBUG nova.compute.manager [-] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1578.247706] env[63371]: DEBUG nova.network.neutron [-] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1578.291583] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "e781866e-9b26-47c7-b1a6-d6d9547bf2fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1578.291583] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "e781866e-9b26-47c7-b1a6-d6d9547bf2fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.356167] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e17e95-09c5-ea4e-8524-b7584e0a5b39, 'name': SearchDatastore_Task, 'duration_secs': 0.015121} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.357416] env[63371]: DEBUG nova.network.neutron [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updated VIF entry in instance network info cache for port bc8b891d-040a-4a55-a281-311c08ae828d. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1578.357529] env[63371]: DEBUG nova.network.neutron [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updating instance_info_cache with network_info: [{"id": "bc8b891d-040a-4a55-a281-311c08ae828d", "address": "fa:16:3e:ea:27:0c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8b891d-04", "ovs_interfaceid": "bc8b891d-040a-4a55-a281-311c08ae828d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.361277] env[63371]: DEBUG nova.compute.manager [req-337611af-9296-4bc0-94f6-47c6a017b273 req-a706eb23-9051-40d2-8468-acef11b6febd service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Received event network-changed-ca5ead57-035d-446f-8117-2c2374008be8 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1578.361501] env[63371]: DEBUG nova.compute.manager [req-337611af-9296-4bc0-94f6-47c6a017b273 req-a706eb23-9051-40d2-8468-acef11b6febd service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Refreshing instance network info cache due to event network-changed-ca5ead57-035d-446f-8117-2c2374008be8. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1578.361682] env[63371]: DEBUG oslo_concurrency.lockutils [req-337611af-9296-4bc0-94f6-47c6a017b273 req-a706eb23-9051-40d2-8468-acef11b6febd service nova] Acquiring lock "refresh_cache-c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.361826] env[63371]: DEBUG oslo_concurrency.lockutils [req-337611af-9296-4bc0-94f6-47c6a017b273 req-a706eb23-9051-40d2-8468-acef11b6febd service nova] Acquired lock "refresh_cache-c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.362023] env[63371]: DEBUG nova.network.neutron [req-337611af-9296-4bc0-94f6-47c6a017b273 req-a706eb23-9051-40d2-8468-acef11b6febd service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Refreshing network info cache for port ca5ead57-035d-446f-8117-2c2374008be8 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1578.363666] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22116f40-217a-4b21-8822-12c9cab0b63f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.373802] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1578.373802] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5226d85a-503b-f032-9601-e9c98ff23bfd" [ 1578.373802] env[63371]: _type = "Task" [ 1578.373802] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.391050] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5226d85a-503b-f032-9601-e9c98ff23bfd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.496527] env[63371]: DEBUG nova.network.neutron [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Successfully created port: a25f7a2e-b96f-4966-a665-76f86d05a00d {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1578.543758] env[63371]: DEBUG nova.compute.manager [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1578.604866] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23fcee43-f8e1-44b2-9c92-4e3a86b698e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.612551] env[63371]: DEBUG nova.network.neutron [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1578.617323] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9c0e08-43c0-412a-a6d2-1a366a39bba6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.651169] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc6bb86-cfcf-485c-a9f1-d269a8c6ccda {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.659518] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a382c3d-33ae-4f2c-bcdd-355bdf9897ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.675816] env[63371]: DEBUG nova.compute.provider_tree [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1578.798653] env[63371]: DEBUG nova.compute.manager [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1578.819250] env[63371]: DEBUG nova.network.neutron [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Updating instance_info_cache with network_info: [{"id": "0afc5c03-432d-430e-aadd-25d2ee234367", "address": "fa:16:3e:a3:11:6a", "network": {"id": "1fe9851f-d41c-46f4-857b-a15e88b76efe", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-162458160-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22cee8ba9bdd42a1bb39518839dbd437", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0afc5c03-43", "ovs_interfaceid": "0afc5c03-432d-430e-aadd-25d2ee234367", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.864943] env[63371]: DEBUG oslo_concurrency.lockutils [req-edd81d02-4b41-4d88-9169-07d088b54950 req-3e0aad4b-e8ac-4044-b65b-25f9d3d0c3c4 service nova] Releasing lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.890516] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5226d85a-503b-f032-9601-e9c98ff23bfd, 'name': SearchDatastore_Task, 'duration_secs': 0.020825} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.890690] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.891238] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0c9156ea-81c4-4286-a20b-66068a5bce59/0c9156ea-81c4-4286-a20b-66068a5bce59.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1578.891586] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-967b2b7e-902d-44b9-9d29-e5ca47a896c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.901584] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1578.901584] env[63371]: value = "task-1774245" [ 1578.901584] env[63371]: _type = "Task" [ 1578.901584] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.912657] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774245, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.068848] env[63371]: DEBUG nova.network.neutron [-] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.184161] env[63371]: DEBUG nova.scheduler.client.report [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1579.210969] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquiring lock "1cb18f2a-6476-4492-8576-7b0fd693a107" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.210969] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Lock "1cb18f2a-6476-4492-8576-7b0fd693a107" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.210969] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquiring lock "1cb18f2a-6476-4492-8576-7b0fd693a107-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.210969] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Lock "1cb18f2a-6476-4492-8576-7b0fd693a107-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.210969] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Lock "1cb18f2a-6476-4492-8576-7b0fd693a107-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.212718] env[63371]: INFO nova.compute.manager [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 
tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Terminating instance [ 1579.216073] env[63371]: DEBUG nova.compute.manager [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1579.216662] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1579.217721] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9def79-7026-42bf-bc6e-a15f4d2e2d92 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.232233] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1579.232233] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e58fa5e7-11f9-42e2-9fdc-0d0a7855db6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.244682] env[63371]: DEBUG oslo_vmware.api [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Waiting for the task: (returnval){ [ 1579.244682] env[63371]: value = "task-1774246" [ 1579.244682] env[63371]: _type = "Task" [ 1579.244682] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.256613] env[63371]: DEBUG oslo_vmware.api [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774246, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.323462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.324452] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Releasing lock "refresh_cache-1276e001-fb07-4367-8b03-81c5fe5fbd0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.325379] env[63371]: DEBUG nova.compute.manager [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Instance network_info: |[{"id": "0afc5c03-432d-430e-aadd-25d2ee234367", "address": "fa:16:3e:a3:11:6a", "network": {"id": "1fe9851f-d41c-46f4-857b-a15e88b76efe", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-162458160-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22cee8ba9bdd42a1bb39518839dbd437", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0afc5c03-43", "ovs_interfaceid": "0afc5c03-432d-430e-aadd-25d2ee234367", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1579.329019] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a3:11:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35ac9709-fd8b-4630-897a-68ed629d1b11', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0afc5c03-432d-430e-aadd-25d2ee234367', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1579.336569] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Creating folder: Project (22cee8ba9bdd42a1bb39518839dbd437). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1579.340413] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2352bb1d-23f5-4f6c-a162-1af531834330 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.359030] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Created folder: Project (22cee8ba9bdd42a1bb39518839dbd437) in parent group-v368199. [ 1579.359030] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Creating folder: Instances. Parent ref: group-v368386. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1579.359030] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32fa3de6-1458-4ab8-9674-c077d8d4d13c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.372598] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Created folder: Instances in parent group-v368386. [ 1579.374026] env[63371]: DEBUG oslo.service.loopingcall [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1579.374026] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1579.374026] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fb55b19-f88d-4ebb-85b1-b74f09d519a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.394350] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.403635] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1579.403635] env[63371]: value = "task-1774249" [ 1579.403635] env[63371]: _type = "Task" [ 1579.403635] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.426074] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774249, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.428125] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774245, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.477196] env[63371]: DEBUG nova.network.neutron [req-337611af-9296-4bc0-94f6-47c6a017b273 req-a706eb23-9051-40d2-8468-acef11b6febd service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Updated VIF entry in instance network info cache for port ca5ead57-035d-446f-8117-2c2374008be8. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1579.477344] env[63371]: DEBUG nova.network.neutron [req-337611af-9296-4bc0-94f6-47c6a017b273 req-a706eb23-9051-40d2-8468-acef11b6febd service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Updating instance_info_cache with network_info: [{"id": "ca5ead57-035d-446f-8117-2c2374008be8", "address": "fa:16:3e:27:7e:a3", "network": {"id": "85c5014e-02e5-457c-b241-aab48881a0a2", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-266845225-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d2fecd600f6447dab2440df8abfc83be", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bec1528b-3e87-477b-8ab2-02696ad47e66", "external-id": "nsx-vlan-transportzone-180", "segmentation_id": 180, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca5ead57-03", "ovs_interfaceid": "ca5ead57-035d-446f-8117-2c2374008be8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1579.559873] env[63371]: DEBUG nova.compute.manager [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1579.573813] env[63371]: INFO nova.compute.manager [-] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Took 1.33 seconds to deallocate network for instance. 
[ 1579.586958] env[63371]: DEBUG nova.virt.hardware [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=<?>,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-11T21:16:28Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1579.587375] env[63371]: DEBUG nova.virt.hardware [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1579.587375] env[63371]: DEBUG nova.virt.hardware [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1579.588135] env[63371]: DEBUG nova.virt.hardware [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1579.588135] env[63371]: DEBUG nova.virt.hardware [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1579.588135] env[63371]: DEBUG nova.virt.hardware [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1579.588341] env[63371]: DEBUG nova.virt.hardware [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1579.588397] env[63371]: DEBUG nova.virt.hardware [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1579.588524] env[63371]: DEBUG nova.virt.hardware [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a 
tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1579.588701] env[63371]: DEBUG nova.virt.hardware [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1579.588866] env[63371]: DEBUG nova.virt.hardware [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1579.589754] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd75c81c-c9bb-45c7-94dc-5c763fb1532d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.598411] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2759c03e-d433-4c3c-aef1-32e689b56176 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.688312] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.158s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.691253] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.687s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.692112] env[63371]: DEBUG nova.objects.instance [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lazy-loading 'resources' on Instance uuid 574121c4-c721-4d30-81ec-3f2310a7b6d1 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1579.706790] env[63371]: DEBUG nova.compute.manager [req-2244da05-8858-4883-b30f-f1f1ec0e865d req-9bac802b-e156-4570-a3c7-ed1b92699dc3 service nova] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Received event network-changed-0afc5c03-432d-430e-aadd-25d2ee234367 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1579.706850] env[63371]: DEBUG nova.compute.manager [req-2244da05-8858-4883-b30f-f1f1ec0e865d req-9bac802b-e156-4570-a3c7-ed1b92699dc3 service nova] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Refreshing instance network info cache due to event network-changed-0afc5c03-432d-430e-aadd-25d2ee234367. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1579.707176] env[63371]: DEBUG oslo_concurrency.lockutils [req-2244da05-8858-4883-b30f-f1f1ec0e865d req-9bac802b-e156-4570-a3c7-ed1b92699dc3 service nova] Acquiring lock "refresh_cache-1276e001-fb07-4367-8b03-81c5fe5fbd0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.707397] env[63371]: DEBUG oslo_concurrency.lockutils [req-2244da05-8858-4883-b30f-f1f1ec0e865d req-9bac802b-e156-4570-a3c7-ed1b92699dc3 service nova] Acquired lock "refresh_cache-1276e001-fb07-4367-8b03-81c5fe5fbd0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.707397] env[63371]: DEBUG nova.network.neutron [req-2244da05-8858-4883-b30f-f1f1ec0e865d req-9bac802b-e156-4570-a3c7-ed1b92699dc3 service nova] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Refreshing network info cache for port 0afc5c03-432d-430e-aadd-25d2ee234367 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1579.715547] env[63371]: INFO nova.scheduler.client.report [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Deleted allocations for instance d6bc618e-33c9-4b45-b79f-afe6811acd4e [ 1579.757198] env[63371]: DEBUG oslo_vmware.api [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774246, 'name': PowerOffVM_Task, 'duration_secs': 0.478688} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.757198] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1579.757198] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1579.757198] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e7fe9d5-a907-432e-bf52-2a22ce5c0ef0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.855267] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1579.855538] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1579.855721] env[63371]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Deleting the datastore file [datastore1] 1cb18f2a-6476-4492-8576-7b0fd693a107 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1579.855994] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12c5b084-2cf3-4da9-b34d-76b2bad948fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.863862] env[63371]: DEBUG oslo_vmware.api [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Waiting for the task: (returnval){ [ 1579.863862] env[63371]: value = "task-1774251" [ 1579.863862] env[63371]: _type = "Task" [ 1579.863862] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.872116] env[63371]: DEBUG oslo_vmware.api [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774251, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.914304] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774245, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619018} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.916272] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0c9156ea-81c4-4286-a20b-66068a5bce59/0c9156ea-81c4-4286-a20b-66068a5bce59.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1579.916272] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1579.916272] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e1ef824-65d3-4f72-9622-c2f9aa843354 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.921106] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774249, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.927854] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1579.927854] env[63371]: value = "task-1774252" [ 1579.927854] env[63371]: _type = "Task" [ 1579.927854] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.937984] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774252, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.982754] env[63371]: DEBUG oslo_concurrency.lockutils [req-337611af-9296-4bc0-94f6-47c6a017b273 req-a706eb23-9051-40d2-8468-acef11b6febd service nova] Releasing lock "refresh_cache-c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.082804] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.192221] env[63371]: DEBUG nova.network.neutron [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Successfully updated port: a25f7a2e-b96f-4966-a665-76f86d05a00d {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1580.225752] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cab46422-0761-4128-b1a3-9d07457326d9 tempest-ServerPasswordTestJSON-243477401 tempest-ServerPasswordTestJSON-243477401-project-member] Lock "d6bc618e-33c9-4b45-b79f-afe6811acd4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 40.487s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.379938] env[63371]: DEBUG oslo_vmware.api [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Task: {'id': task-1774251, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.391921} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.380214] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1580.380509] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1580.380590] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1580.380954] env[63371]: INFO nova.compute.manager [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1580.380954] env[63371]: DEBUG oslo.service.loopingcall [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1580.383463] env[63371]: DEBUG nova.compute.manager [-] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1580.383572] env[63371]: DEBUG nova.network.neutron [-] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1580.387904] env[63371]: DEBUG nova.compute.manager [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Received event network-vif-plugged-a25f7a2e-b96f-4966-a665-76f86d05a00d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1580.388037] env[63371]: DEBUG oslo_concurrency.lockutils [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] Acquiring lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.392019] env[63371]: DEBUG oslo_concurrency.lockutils [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] Lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.392019] env[63371]: DEBUG oslo_concurrency.lockutils [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] Lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.392019] env[63371]: DEBUG nova.compute.manager [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] No waiting events found dispatching network-vif-plugged-a25f7a2e-b96f-4966-a665-76f86d05a00d {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1580.392019] env[63371]: WARNING nova.compute.manager [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Received unexpected event network-vif-plugged-a25f7a2e-b96f-4966-a665-76f86d05a00d for instance with vm_state building and task_state spawning. [ 1580.392019] env[63371]: DEBUG nova.compute.manager [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Received event network-changed-a25f7a2e-b96f-4966-a665-76f86d05a00d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1580.392019] env[63371]: DEBUG nova.compute.manager [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Refreshing instance network info cache due to event network-changed-a25f7a2e-b96f-4966-a665-76f86d05a00d. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1580.392019] env[63371]: DEBUG oslo_concurrency.lockutils [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] Acquiring lock "refresh_cache-12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1580.392019] env[63371]: DEBUG oslo_concurrency.lockutils [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] Acquired lock "refresh_cache-12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.392019] env[63371]: DEBUG nova.network.neutron [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Refreshing network info cache for port a25f7a2e-b96f-4966-a665-76f86d05a00d {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1580.419371] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774249, 'name': CreateVM_Task, 'duration_secs': 0.683853} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.421285] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1580.422237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1580.422478] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.422786] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1580.423711] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07267d09-e4f7-4cd2-ae14-34dce349cb55 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.428990] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Waiting for the task: (returnval){ [ 1580.428990] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526e0cf0-4fe9-8dd1-0a52-d654d765dae6" [ 1580.428990] env[63371]: _type = "Task" [ 
1580.428990] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.444243] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774252, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08997} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.447961] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1580.448305] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526e0cf0-4fe9-8dd1-0a52-d654d765dae6, 'name': SearchDatastore_Task, 'duration_secs': 0.010294} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.448982] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2f682e-4ab7-4db4-9180-a7338f373391 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.451440] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.451658] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1580.451881] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1580.452101] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1580.452226] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d 
tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1580.454564] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-93c330da-834b-433e-9d56-dc0c1777a500 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.476757] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] 0c9156ea-81c4-4286-a20b-66068a5bce59/0c9156ea-81c4-4286-a20b-66068a5bce59.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1580.478579] env[63371]: DEBUG nova.network.neutron [req-2244da05-8858-4883-b30f-f1f1ec0e865d req-9bac802b-e156-4570-a3c7-ed1b92699dc3 service nova] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Updated VIF entry in instance network info cache for port 0afc5c03-432d-430e-aadd-25d2ee234367. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1580.478827] env[63371]: DEBUG nova.network.neutron [req-2244da05-8858-4883-b30f-f1f1ec0e865d req-9bac802b-e156-4570-a3c7-ed1b92699dc3 service nova] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Updating instance_info_cache with network_info: [{"id": "0afc5c03-432d-430e-aadd-25d2ee234367", "address": "fa:16:3e:a3:11:6a", "network": {"id": "1fe9851f-d41c-46f4-857b-a15e88b76efe", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-162458160-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22cee8ba9bdd42a1bb39518839dbd437", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35ac9709-fd8b-4630-897a-68ed629d1b11", "external-id": "nsx-vlan-transportzone-284", "segmentation_id": 284, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0afc5c03-43", "ovs_interfaceid": "0afc5c03-432d-430e-aadd-25d2ee234367", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.482375] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bcb10d3-9a63-47f5-b654-168c45592673 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.497392] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1580.497568] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1580.499256] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75f01365-371f-49c4-bf99-23f56dc0eeec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.506182] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Waiting for the task: (returnval){ [ 1580.506182] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5202e8e5-ebf0-0fb3-aa99-5a0396b7c0a5" [ 1580.506182] env[63371]: _type = "Task" [ 1580.506182] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.513101] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1580.513101] env[63371]: value = "task-1774253" [ 1580.513101] env[63371]: _type = "Task" [ 1580.513101] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.520031] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5202e8e5-ebf0-0fb3-aa99-5a0396b7c0a5, 'name': SearchDatastore_Task, 'duration_secs': 0.010188} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.521875] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-075931ed-b2ec-4e56-8057-cbf5b75e622e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.529667] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774253, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.533484] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Waiting for the task: (returnval){ [ 1580.533484] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521412b8-1a1b-f6b6-4fe3-67f654c261cd" [ 1580.533484] env[63371]: _type = "Task" [ 1580.533484] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.541591] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521412b8-1a1b-f6b6-4fe3-67f654c261cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.686497] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e04666d-a8fc-42a8-91da-c20e6e26afce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.695278] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba73af6c-ac42-4e13-86e4-be7125bc96c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.700797] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "refresh_cache-12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1580.734357] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e628a7f3-e6a3-4ec2-89d9-1f44480c4187 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.743351] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-324ffc75-8184-4474-a20a-5bde2d491d37 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.758117] env[63371]: DEBUG nova.compute.provider_tree [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1580.934747] env[63371]: DEBUG nova.network.neutron [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1580.999838] env[63371]: DEBUG oslo_concurrency.lockutils [req-2244da05-8858-4883-b30f-f1f1ec0e865d req-9bac802b-e156-4570-a3c7-ed1b92699dc3 service nova] Releasing lock "refresh_cache-1276e001-fb07-4367-8b03-81c5fe5fbd0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.000110] env[63371]: DEBUG nova.compute.manager [req-2244da05-8858-4883-b30f-f1f1ec0e865d req-9bac802b-e156-4570-a3c7-ed1b92699dc3 service nova] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Received event network-vif-deleted-bc802b6c-1a40-491b-8222-aa71e5d0bcd3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1581.023831] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774253, 'name': ReconfigVM_Task, 'duration_secs': 0.31102} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.024201] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Reconfigured VM instance instance-00000041 to attach disk [datastore1] 0c9156ea-81c4-4286-a20b-66068a5bce59/0c9156ea-81c4-4286-a20b-66068a5bce59.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1581.024834] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e2616d9-6f65-4eb5-8d7a-7758efe5647b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.032085] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1581.032085] env[63371]: value = "task-1774254" [ 1581.032085] env[63371]: _type = "Task" [ 1581.032085] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.043981] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774254, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.047690] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521412b8-1a1b-f6b6-4fe3-67f654c261cd, 'name': SearchDatastore_Task, 'duration_secs': 0.010516} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.047933] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.048203] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1276e001-fb07-4367-8b03-81c5fe5fbd0d/1276e001-fb07-4367-8b03-81c5fe5fbd0d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1581.048447] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8238ecf-15fe-49d8-a03f-ccb1101e4a0f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.058374] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Waiting for the task: (returnval){ [ 1581.058374] env[63371]: value = "task-1774255" [ 1581.058374] env[63371]: _type = "Task" [ 1581.058374] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.068500] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774255, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.085060] env[63371]: DEBUG nova.network.neutron [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1581.282667] env[63371]: ERROR nova.scheduler.client.report [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [req-55402dc4-c99b-493f-bd84-3b0a699bbd0b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-55402dc4-c99b-493f-bd84-3b0a699bbd0b"}]} [ 1581.291903] env[63371]: DEBUG nova.network.neutron [-] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1581.303044] env[63371]: DEBUG nova.scheduler.client.report [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1581.321426] env[63371]: DEBUG nova.scheduler.client.report [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1581.322062] env[63371]: DEBUG nova.compute.provider_tree [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1581.335888] env[63371]: DEBUG nova.scheduler.client.report [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1581.364608] env[63371]: DEBUG nova.scheduler.client.report [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1581.551726] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 
tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774254, 'name': Rename_Task, 'duration_secs': 0.152662} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.554735] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1581.555402] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-26281464-067e-49fb-bef4-e95113c0182f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.563881] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1581.563881] env[63371]: value = "task-1774256" [ 1581.563881] env[63371]: _type = "Task" [ 1581.563881] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.574673] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774255, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.580237] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774256, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.588304] env[63371]: DEBUG oslo_concurrency.lockutils [req-b2b765d5-d116-4cac-a313-ccfb474062ab req-830f6efc-d610-43ca-b081-25099da51765 service nova] Releasing lock "refresh_cache-12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.588476] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "refresh_cache-12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.588556] env[63371]: DEBUG nova.network.neutron [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1581.794619] env[63371]: INFO nova.compute.manager [-] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Took 1.41 seconds to deallocate network for instance. 
[ 1581.819931] env[63371]: DEBUG nova.compute.manager [req-0c57ea4a-933a-4e31-9ed4-97851051ec2a req-913c4af8-f70b-4321-8b3a-82e10593a851 service nova] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Received event network-vif-deleted-8f925cfd-c5f7-4a4f-8782-bea15764877a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1581.869018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa9c846-f258-4257-9e0d-e34b5d5e03c1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.877461] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f836d4db-4d9b-4a0a-9948-aef98db5a9c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.911135] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7151ce-7c88-4500-a0b2-ee3589e3baf4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.920291] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd18dd52-dc6f-4bbe-8121-c46bde30df9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.936182] env[63371]: DEBUG nova.compute.provider_tree [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1582.072118] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774255, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.532799} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.073264] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1276e001-fb07-4367-8b03-81c5fe5fbd0d/1276e001-fb07-4367-8b03-81c5fe5fbd0d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1582.073537] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1582.073794] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c75b04f5-a7ba-4e7a-b6bc-a01b37d369a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.079084] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774256, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.086948] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Waiting for the task: (returnval){ [ 1582.086948] env[63371]: value = "task-1774257" [ 1582.086948] env[63371]: _type = "Task" [ 1582.086948] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.097063] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774257, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.138809] env[63371]: DEBUG nova.network.neutron [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1582.301857] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1582.402435] env[63371]: DEBUG nova.network.neutron [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Updating instance_info_cache with network_info: [{"id": "a25f7a2e-b96f-4966-a665-76f86d05a00d", "address": "fa:16:3e:ac:3c:cc", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa25f7a2e-b9", "ovs_interfaceid": "a25f7a2e-b96f-4966-a665-76f86d05a00d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.469726] env[63371]: DEBUG nova.scheduler.client.report [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 89 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1582.469986] env[63371]: DEBUG nova.compute.provider_tree [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 89 to 90 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1582.470194] env[63371]: DEBUG nova.compute.provider_tree [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Updating inventory in ProviderTree for provider 
c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1582.577034] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774256, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.597623] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774257, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072313} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.597901] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1582.599036] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504d72db-7fb4-48dc-b85c-6d435b4350c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.622952] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 1276e001-fb07-4367-8b03-81c5fe5fbd0d/1276e001-fb07-4367-8b03-81c5fe5fbd0d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1582.623635] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fba9aa97-f520-4a62-b4ea-b0e46226cb35 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.644583] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Waiting for the task: (returnval){ [ 1582.644583] env[63371]: value = "task-1774258" [ 1582.644583] env[63371]: _type = "Task" [ 1582.644583] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.653504] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774258, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.905970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "refresh_cache-12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.906253] env[63371]: DEBUG nova.compute.manager [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Instance network_info: |[{"id": "a25f7a2e-b96f-4966-a665-76f86d05a00d", "address": "fa:16:3e:ac:3c:cc", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa25f7a2e-b9", "ovs_interfaceid": "a25f7a2e-b96f-4966-a665-76f86d05a00d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1582.906763] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:3c:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a25f7a2e-b96f-4966-a665-76f86d05a00d', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1582.914849] env[63371]: DEBUG oslo.service.loopingcall [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1582.915098] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1582.915332] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ad8140c9-d9d9-41fa-b820-186934ee9665 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.936804] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1582.936804] env[63371]: value = "task-1774259" [ 1582.936804] env[63371]: _type = "Task" [ 1582.936804] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.946849] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774259, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.977143] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.286s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.979882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.412s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.981719] env[63371]: INFO nova.compute.claims [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1583.005102] env[63371]: INFO nova.scheduler.client.report [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted allocations for instance 574121c4-c721-4d30-81ec-3f2310a7b6d1 [ 1583.076931] env[63371]: DEBUG oslo_vmware.api [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774256, 'name': PowerOnVM_Task, 'duration_secs': 1.06017} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.077566] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1583.077566] env[63371]: INFO nova.compute.manager [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Took 9.96 seconds to spawn the instance on the hypervisor. [ 1583.077681] env[63371]: DEBUG nova.compute.manager [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1583.078422] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77476103-0470-42ba-a015-3053d4ef2067 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.156266] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774258, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.447600] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774259, 'name': CreateVM_Task, 'duration_secs': 0.460626} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.447777] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1583.448481] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.448646] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.448961] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1583.449297] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9653668f-da1d-4dba-a519-12b2514b2d93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.454499] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1583.454499] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522862ec-7e5b-5eaa-9cc2-ff82f21326a1" [ 1583.454499] env[63371]: _type = "Task" [ 1583.454499] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.463037] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522862ec-7e5b-5eaa-9cc2-ff82f21326a1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.516745] env[63371]: DEBUG oslo_concurrency.lockutils [None req-609c83cc-9266-41e3-a5cf-d04a0e9cc2ac tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "574121c4-c721-4d30-81ec-3f2310a7b6d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.502s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.598829] env[63371]: INFO nova.compute.manager [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Took 46.70 seconds to build instance. 
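The devstack-image-cache_base handling above is serialized with oslo.concurrency locks: the datastore path of the cached image is used as the lock name, so concurrent builds that need the same image take turns, which is what the "Acquiring lock" / "Acquired lock" / "Releasing lock" entries record. A rough sketch of the same locking pattern, with the lock body left as a placeholder:

    from oslo_concurrency import lockutils

    # Lock name mirrors the log entries: the datastore path of the cached image.
    image_cache_lock = ('[datastore1] devstack-image-cache_base/'
                        '1aeb47a7-4e18-481d-b3c0-d33e8c7839d9')

    with lockutils.lock(image_cache_lock):
        # Only one worker in this process touches the cached image at a time;
        # a second request for the same image blocks here until release.
        pass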
[ 1583.657289] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774258, 'name': ReconfigVM_Task, 'duration_secs': 0.520614} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.657572] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 1276e001-fb07-4367-8b03-81c5fe5fbd0d/1276e001-fb07-4367-8b03-81c5fe5fbd0d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1583.658263] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c9e953dc-3d31-4e42-8307-675a3f379ed9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.666580] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Waiting for the task: (returnval){ [ 1583.666580] env[63371]: value = "task-1774260" [ 1583.666580] env[63371]: _type = "Task" [ 1583.666580] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.680910] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774260, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.966652] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522862ec-7e5b-5eaa-9cc2-ff82f21326a1, 'name': SearchDatastore_Task, 'duration_secs': 0.046139} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.966971] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.967238] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1583.967468] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1583.967611] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1583.967794] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1583.968118] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a378880b-911f-4913-94e1-51cbe02f9591 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.977353] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1583.977527] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1583.978250] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8c63707f-a26b-4708-ae69-5a90ce7f862f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.983828] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1583.983828] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9cb84-129d-26b5-3ecd-9d32688880de" [ 1583.983828] env[63371]: _type = "Task" [ 1583.983828] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.995010] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9cb84-129d-26b5-3ecd-9d32688880de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.101375] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b3170f3b-3469-4a73-adc8-0fe8a530e26f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "0c9156ea-81c4-4286-a20b-66068a5bce59" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.212s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.178499] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774260, 'name': Rename_Task, 'duration_secs': 0.265759} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.181423] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1584.181867] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-678819cd-f11b-41b0-ada2-b3e922238586 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.189497] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Waiting for the task: (returnval){ [ 1584.189497] env[63371]: value = "task-1774261" [ 1584.189497] env[63371]: _type = "Task" [ 1584.189497] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.201485] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774261, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.380620] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4e7896e-7266-47e7-a39d-cd7ad614b704 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.389193] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e999e9-4ff1-44bd-98a4-01e1d1c1112d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.420649] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cc2aa5-0498-40e6-a072-01c3253199ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.429718] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d4150d-fd4a-45ac-9613-9122fb1f9dde {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.445783] env[63371]: DEBUG nova.compute.provider_tree [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1584.496718] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9cb84-129d-26b5-3ecd-9d32688880de, 'name': SearchDatastore_Task, 'duration_secs': 0.016187} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.497564] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-757ba1d8-1b3f-4cd3-a338-153dc1150610 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.504146] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1584.504146] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5240f8bc-350c-1ad0-cc10-c19521bab8e7" [ 1584.504146] env[63371]: _type = "Task" [ 1584.504146] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.513247] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5240f8bc-350c-1ad0-cc10-c19521bab8e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.702290] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774261, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.948928] env[63371]: DEBUG nova.scheduler.client.report [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1585.015849] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5240f8bc-350c-1ad0-cc10-c19521bab8e7, 'name': SearchDatastore_Task, 'duration_secs': 0.014386} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.016148] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.016409] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956/12e393d7-e8d5-4a9a-bad7-3cfffbb9d956.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1585.016676] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-198a3e5c-bea1-4a44-a2e2-164e32fb6d6f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.024841] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1585.024841] env[63371]: value = "task-1774262" [ 1585.024841] env[63371]: _type = "Task" [ 1585.024841] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.033763] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774262, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.200856] env[63371]: DEBUG oslo_vmware.api [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774261, 'name': PowerOnVM_Task, 'duration_secs': 1.007904} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.201149] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1585.201389] env[63371]: INFO nova.compute.manager [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Took 8.24 seconds to spawn the instance on the hypervisor. 
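The inventory dictionaries reported to Placement in the entries above translate into usable capacity as (total - reserved) * allocation_ratio, i.e. 192 VCPU, 196078 MB of RAM and 400 GB of disk for this provider. A small illustration of that arithmetic using the values from the log (the capacity() helper itself is only illustrative):

    # Inventory as logged for provider c079ebb1-2fa2-4df9-bdab-118e305653c1.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Effective capacity of one resource class.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, capacity(inv))  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0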
[ 1585.201573] env[63371]: DEBUG nova.compute.manager [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1585.202391] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad810e8-2e05-4fcf-83d7-6c41c1c08a4d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.457804] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.478s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.458359] env[63371]: DEBUG nova.compute.manager [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1585.462196] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.257s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.462539] env[63371]: DEBUG nova.objects.instance [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lazy-loading 'resources' on Instance uuid 47c1c242-d190-4523-8033-307c5a9b7535 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1585.537760] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774262, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.560584] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "3027832f-12cd-4255-b699-bcbb254a6c5a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1585.561291] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "3027832f-12cd-4255-b699-bcbb254a6c5a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.719322] env[63371]: INFO nova.compute.manager [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Took 46.38 seconds to build instance. [ 1585.969424] env[63371]: DEBUG nova.compute.utils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1585.971054] env[63371]: DEBUG nova.compute.manager [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1585.971265] env[63371]: DEBUG nova.network.neutron [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1586.036019] env[63371]: DEBUG nova.policy [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e88115fd7e2541e08000e93ef9ab0524', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6919de51a2ef456db7a25d4cec1e26ad', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1586.040786] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774262, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.054102] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquiring lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.066874] env[63371]: DEBUG nova.compute.manager [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1586.224981] env[63371]: DEBUG oslo_concurrency.lockutils [None req-084b584f-f6cd-435a-9be1-b4b277c2c59d tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.788s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.225528] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.171s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.225792] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquiring lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.225947] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1586.226243] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1586.231197] env[63371]: INFO nova.compute.manager [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 
tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Terminating instance [ 1586.232611] env[63371]: DEBUG nova.compute.manager [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1586.232804] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1586.233692] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293e597e-5738-47b1-ba64-bb020dd6c91a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.246297] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1586.246600] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30a5928b-4506-4634-806f-0bf6b52bbafb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.256144] env[63371]: DEBUG oslo_vmware.api [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Waiting for the task: (returnval){ [ 1586.256144] env[63371]: value = "task-1774263" [ 1586.256144] env[63371]: _type = "Task" [ 1586.256144] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.264430] env[63371]: DEBUG oslo_vmware.api [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774263, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.310931] env[63371]: DEBUG nova.network.neutron [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Successfully created port: 02145be4-05da-4b04-95ab-e7aa717efb9a {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1586.374258] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5fa836-f1b3-460d-9ad4-75f5fdea06ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.382105] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b495c50e-4fe6-4327-9a42-bb499926b238 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.413093] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f0fe21-8e07-4740-8ba4-0cc87bd9f3f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.420946] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2a2d40e-00b0-4c9a-ba48-fa79279b0fc6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.436894] env[63371]: DEBUG nova.compute.provider_tree [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1586.474526] env[63371]: DEBUG nova.compute.manager [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1586.538357] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774262, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.556653] env[63371]: DEBUG nova.network.neutron [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Successfully created port: 4a9c8b81-0ba5-4746-8695-2464b801b783 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1586.591350] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.767277] env[63371]: DEBUG oslo_vmware.api [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774263, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.943183] env[63371]: DEBUG nova.scheduler.client.report [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1587.044028] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774262, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.602204} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.044142] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956/12e393d7-e8d5-4a9a-bad7-3cfffbb9d956.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1587.044421] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1587.044738] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4bf4841d-e965-40bd-82f7-0fc76d15f394 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.053968] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1587.053968] env[63371]: value = "task-1774264" [ 1587.053968] env[63371]: _type = "Task" [ 1587.053968] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.066503] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774264, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.267643] env[63371]: DEBUG oslo_vmware.api [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774263, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.448439] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.986s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1587.451023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.587s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.451125] env[63371]: DEBUG nova.objects.instance [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63371) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1587.472283] env[63371]: INFO nova.scheduler.client.report [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Deleted allocations for instance 47c1c242-d190-4523-8033-307c5a9b7535 [ 1587.486174] env[63371]: DEBUG nova.compute.manager [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1587.512806] env[63371]: DEBUG nova.virt.hardware [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1587.513072] env[63371]: DEBUG nova.virt.hardware [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1587.513239] env[63371]: DEBUG nova.virt.hardware [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1587.513425] env[63371]: DEBUG nova.virt.hardware [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1587.513575] env[63371]: DEBUG nova.virt.hardware [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1587.513723] env[63371]: DEBUG nova.virt.hardware [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1587.513927] env[63371]: DEBUG nova.virt.hardware [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1587.514139] env[63371]: DEBUG nova.virt.hardware [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1587.514365] env[63371]: DEBUG nova.virt.hardware [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 
tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1587.514541] env[63371]: DEBUG nova.virt.hardware [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1587.514714] env[63371]: DEBUG nova.virt.hardware [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1587.515578] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f9caa1-5e10-4d9b-9c2f-13355f16d38a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.525086] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69d868c3-b206-4d6a-a560-c3007a6c2eae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.563018] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774264, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073019} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.563294] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1587.564103] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c65413-069f-4755-8405-921a6c343bd8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.587502] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956/12e393d7-e8d5-4a9a-bad7-3cfffbb9d956.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1587.587797] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34d03582-7941-4631-abb4-96c2599f4d12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.622625] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1587.622625] env[63371]: 
value = "task-1774265" [ 1587.622625] env[63371]: _type = "Task" [ 1587.622625] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.630776] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774265, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.768967] env[63371]: DEBUG oslo_vmware.api [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774263, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.979462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7953e85-f56d-4031-9d1e-c8de78363b02 tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "47c1c242-d190-4523-8033-307c5a9b7535" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.990s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.135123] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774265, 'name': ReconfigVM_Task, 'duration_secs': 0.305879} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.135348] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956/12e393d7-e8d5-4a9a-bad7-3cfffbb9d956.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1588.140039] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11524791-0c82-4bbe-8658-9c81da8f624d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.140039] env[63371]: DEBUG nova.compute.manager [req-4afa0015-5c39-4ade-bcf2-58a4ce599385 req-3b557ef2-a7d6-44ab-ae63-4c17464e7bb5 service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Received event network-vif-plugged-02145be4-05da-4b04-95ab-e7aa717efb9a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1588.140039] env[63371]: DEBUG oslo_concurrency.lockutils [req-4afa0015-5c39-4ade-bcf2-58a4ce599385 req-3b557ef2-a7d6-44ab-ae63-4c17464e7bb5 service nova] Acquiring lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.140039] env[63371]: DEBUG oslo_concurrency.lockutils [req-4afa0015-5c39-4ade-bcf2-58a4ce599385 req-3b557ef2-a7d6-44ab-ae63-4c17464e7bb5 service nova] Lock 
"3bd1c148-a48d-402c-bd76-2cb1d38b49f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.140226] env[63371]: DEBUG oslo_concurrency.lockutils [req-4afa0015-5c39-4ade-bcf2-58a4ce599385 req-3b557ef2-a7d6-44ab-ae63-4c17464e7bb5 service nova] Lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.140308] env[63371]: DEBUG nova.compute.manager [req-4afa0015-5c39-4ade-bcf2-58a4ce599385 req-3b557ef2-a7d6-44ab-ae63-4c17464e7bb5 service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] No waiting events found dispatching network-vif-plugged-02145be4-05da-4b04-95ab-e7aa717efb9a {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1588.140459] env[63371]: WARNING nova.compute.manager [req-4afa0015-5c39-4ade-bcf2-58a4ce599385 req-3b557ef2-a7d6-44ab-ae63-4c17464e7bb5 service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Received unexpected event network-vif-plugged-02145be4-05da-4b04-95ab-e7aa717efb9a for instance with vm_state building and task_state spawning. [ 1588.149955] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1588.149955] env[63371]: value = "task-1774266" [ 1588.149955] env[63371]: _type = "Task" [ 1588.149955] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.171382] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774266, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.214184] env[63371]: DEBUG nova.network.neutron [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Successfully updated port: 02145be4-05da-4b04-95ab-e7aa717efb9a {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1588.275720] env[63371]: DEBUG oslo_vmware.api [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774263, 'name': PowerOffVM_Task, 'duration_secs': 1.760843} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.276423] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1588.278098] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1588.278098] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e90d1add-0e05-4e0c-8e79-d294b725106e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.376530] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1588.376634] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1588.376750] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Deleting the datastore file [datastore1] 1276e001-fb07-4367-8b03-81c5fe5fbd0d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1588.377049] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ebb81b5-f412-4493-9b1c-3adc4c72c6ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.386518] env[63371]: DEBUG oslo_vmware.api [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Waiting for the task: (returnval){ [ 1588.386518] env[63371]: value = "task-1774268" [ 1588.386518] env[63371]: _type = "Task" [ 1588.386518] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.396207] env[63371]: DEBUG oslo_vmware.api [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774268, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.462641] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ae15ee2f-2390-4324-8525-f94b676255d0 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.463810] env[63371]: DEBUG oslo_concurrency.lockutils [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 33.072s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.660270] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774266, 'name': Rename_Task, 'duration_secs': 0.173049} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.660610] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1588.660877] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b1153fd-0ade-4ec2-9b7a-72f9fe04163e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.668302] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1588.668302] env[63371]: value = "task-1774269" [ 1588.668302] env[63371]: _type = "Task" [ 1588.668302] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.676289] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774269, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.898809] env[63371]: DEBUG oslo_vmware.api [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Task: {'id': task-1774268, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150559} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.899163] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1588.899336] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1588.899481] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1588.899694] env[63371]: INFO nova.compute.manager [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Took 2.67 seconds to destroy the instance on the hypervisor. [ 1588.899897] env[63371]: DEBUG oslo.service.loopingcall [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1588.900107] env[63371]: DEBUG nova.compute.manager [-] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1588.900205] env[63371]: DEBUG nova.network.neutron [-] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1588.966076] env[63371]: DEBUG nova.objects.instance [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lazy-loading 'migration_context' on Instance uuid 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1589.184673] env[63371]: DEBUG oslo_vmware.api [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774269, 'name': PowerOnVM_Task, 'duration_secs': 0.499845} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.185259] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1589.185583] env[63371]: INFO nova.compute.manager [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Took 9.63 seconds to spawn the instance on the hypervisor. [ 1589.185891] env[63371]: DEBUG nova.compute.manager [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1589.188106] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265bb763-171b-41ad-964a-622e035066eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.230839] env[63371]: DEBUG nova.compute.manager [req-6a9950ee-b231-4dfa-93c0-eb065c23ceab req-76d7dd8f-e13b-4ee2-8978-1a98fd069791 service nova] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Received event network-vif-deleted-0afc5c03-432d-430e-aadd-25d2ee234367 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1589.231052] env[63371]: INFO nova.compute.manager [req-6a9950ee-b231-4dfa-93c0-eb065c23ceab req-76d7dd8f-e13b-4ee2-8978-1a98fd069791 service nova] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Neutron deleted interface 0afc5c03-432d-430e-aadd-25d2ee234367; detaching it from the instance and deleting it from the info cache [ 1589.231225] env[63371]: DEBUG nova.network.neutron [req-6a9950ee-b231-4dfa-93c0-eb065c23ceab req-76d7dd8f-e13b-4ee2-8978-1a98fd069791 service nova] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.335379] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "0cd2018f-7a54-4458-b5fd-353ab75ffbfd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.335608] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "0cd2018f-7a54-4458-b5fd-353ab75ffbfd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.709754] env[63371]: DEBUG nova.network.neutron [-] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Updating instance_info_cache with network_info: [] {{(pid=63371) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.710852] env[63371]: INFO nova.compute.manager [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Took 47.87 seconds to build instance. [ 1589.733684] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4314cee-5436-40a9-ad25-8b29f445a9af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.746620] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d822fd-8cd6-4edd-90aa-f04893ea91ee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.785652] env[63371]: DEBUG nova.compute.manager [req-6a9950ee-b231-4dfa-93c0-eb065c23ceab req-76d7dd8f-e13b-4ee2-8978-1a98fd069791 service nova] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Detach interface failed, port_id=0afc5c03-432d-430e-aadd-25d2ee234367, reason: Instance 1276e001-fb07-4367-8b03-81c5fe5fbd0d could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1589.841922] env[63371]: DEBUG nova.compute.manager [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1589.878272] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4864a516-b2a3-4585-9702-7caf5518c272 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.889943] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1636d8ca-00b3-458c-b030-4a33cf604647 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.933461] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9649fc35-cc80-4597-b151-60b498071ae7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.943353] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0408f501-7126-43a9-aa11-0ac5a75fb2e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.962636] env[63371]: DEBUG nova.compute.provider_tree [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1590.212737] 
env[63371]: INFO nova.compute.manager [-] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Took 1.31 seconds to deallocate network for instance. [ 1590.213217] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fe25059-a9f9-41da-ad0b-1ade97e5bf4a tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.384s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.245464] env[63371]: DEBUG nova.compute.manager [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Received event network-changed-02145be4-05da-4b04-95ab-e7aa717efb9a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1590.245670] env[63371]: DEBUG nova.compute.manager [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Refreshing instance network info cache due to event network-changed-02145be4-05da-4b04-95ab-e7aa717efb9a. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1590.245895] env[63371]: DEBUG oslo_concurrency.lockutils [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] Acquiring lock "refresh_cache-3bd1c148-a48d-402c-bd76-2cb1d38b49f7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.246048] env[63371]: DEBUG oslo_concurrency.lockutils [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] Acquired lock "refresh_cache-3bd1c148-a48d-402c-bd76-2cb1d38b49f7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1590.246311] env[63371]: DEBUG nova.network.neutron [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Refreshing network info cache for port 02145be4-05da-4b04-95ab-e7aa717efb9a {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1590.271703] env[63371]: DEBUG nova.network.neutron [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Successfully updated port: 4a9c8b81-0ba5-4746-8695-2464b801b783 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1590.360091] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.489929] env[63371]: ERROR nova.scheduler.client.report [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [req-ad204ef7-affa-4e78-b2f0-6e5c1825983d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ad204ef7-affa-4e78-b2f0-6e5c1825983d"}]} [ 1590.506487] env[63371]: DEBUG nova.scheduler.client.report [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1590.531215] env[63371]: DEBUG nova.scheduler.client.report [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1590.531481] env[63371]: DEBUG nova.compute.provider_tree [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1590.547724] env[63371]: DEBUG nova.scheduler.client.report [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1590.566829] env[63371]: DEBUG nova.scheduler.client.report [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1590.720440] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 
tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.773753] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "refresh_cache-3bd1c148-a48d-402c-bd76-2cb1d38b49f7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.799693] env[63371]: DEBUG nova.network.neutron [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1590.924135] env[63371]: DEBUG nova.network.neutron [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1591.003155] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdcde06-b097-4c61-9894-84e8b05e43d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.011643] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54488a58-f624-44ad-9b78-e20959667613 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.044427] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6aaddad-9921-4c39-ac9e-8eca00a593f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.052814] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793c483c-6984-42be-b515-5f4eaa1ae013 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.067197] env[63371]: DEBUG nova.compute.provider_tree [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1591.426666] env[63371]: DEBUG oslo_concurrency.lockutils [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] Releasing lock "refresh_cache-3bd1c148-a48d-402c-bd76-2cb1d38b49f7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.426945] env[63371]: DEBUG 
nova.compute.manager [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Received event network-vif-plugged-4a9c8b81-0ba5-4746-8695-2464b801b783 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1591.427219] env[63371]: DEBUG oslo_concurrency.lockutils [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] Acquiring lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1591.427387] env[63371]: DEBUG oslo_concurrency.lockutils [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] Lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1591.427552] env[63371]: DEBUG oslo_concurrency.lockutils [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] Lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1591.427718] env[63371]: DEBUG nova.compute.manager [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] No waiting events found dispatching network-vif-plugged-4a9c8b81-0ba5-4746-8695-2464b801b783 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1591.427885] env[63371]: WARNING nova.compute.manager [req-a97b1858-3917-4303-8e1d-5b5f439c3b21 req-8628fa03-6f62-40d7-b3eb-7c7c5c85f996 service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Received unexpected event network-vif-plugged-4a9c8b81-0ba5-4746-8695-2464b801b783 for instance with vm_state building and task_state spawning. 
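The 409 from Placement a few entries above ("placement.concurrent_update", a resource provider generation conflict) and the inventory/aggregate/trait refresh that follows illustrate generation-based optimistic concurrency: every inventory write carries the provider generation the writer last saw, a mismatch is rejected, and the caller re-reads the current generation before retrying; the retry a few lines below succeeds and bumps the provider generation from 93 to 94. The following is a minimal, self-contained sketch of that pattern only. FakePlacement, ConflictError and set_inventory_with_retry are illustrative stand-ins for this log, not Nova's report client or the real Placement API.

# Sketch of generation-checked inventory updates with refresh-and-retry on conflict.
class ConflictError(Exception):
    pass

class FakePlacement:
    """Stands in for the Placement service; tracks one provider generation."""
    def __init__(self):
        self.generation = 93
        self.inventory = {}

    def put_inventory(self, inventory, generation):
        if generation != self.generation:        # stale generation -> behaves like HTTP 409
            raise ConflictError('resource provider generation conflict')
        self.inventory = inventory
        self.generation += 1                     # a successful write bumps the generation
        return self.generation

def set_inventory_with_retry(placement, inventory, generation, max_attempts=3):
    for _ in range(max_attempts):
        try:
            return placement.put_inventory(inventory, generation)
        except ConflictError:
            # Refresh the provider generation (the log above shows a full
            # inventory refresh at this point) and retry with the fresh value.
            generation = placement.generation
    raise RuntimeError('could not update inventory after %d attempts' % max_attempts)

placement = FakePlacement()
stale_generation = 92                            # simulates the conflict seen above
new_gen = set_inventory_with_retry(placement, {'DISK_GB': {'total': 400}}, stale_generation)
print('inventory updated, provider generation is now', new_gen)   # -> 94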
[ 1591.428259] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquired lock "refresh_cache-3bd1c148-a48d-402c-bd76-2cb1d38b49f7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.428402] env[63371]: DEBUG nova.network.neutron [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1591.597909] env[63371]: DEBUG nova.scheduler.client.report [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 93 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1591.598204] env[63371]: DEBUG nova.compute.provider_tree [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 93 to 94 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1591.598384] env[63371]: DEBUG nova.compute.provider_tree [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1591.699542] env[63371]: INFO nova.compute.manager [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Rebuilding instance [ 1591.741369] env[63371]: DEBUG nova.compute.manager [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1591.742398] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576fe1b6-4934-4aca-a132-b32de6f6f735 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.966422] env[63371]: DEBUG nova.network.neutron [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1592.253951] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1592.254283] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a39860c8-6128-42e9-be1e-6ca84b37cd03 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.259935] env[63371]: DEBUG nova.network.neutron [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Updating instance_info_cache with network_info: [{"id": "02145be4-05da-4b04-95ab-e7aa717efb9a", "address": "fa:16:3e:b9:30:33", "network": {"id": "f0bb68c8-94a0-4f5f-b54f-d0cdfce34659", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-353700507", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02145be4-05", "ovs_interfaceid": "02145be4-05da-4b04-95ab-e7aa717efb9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4a9c8b81-0ba5-4746-8695-2464b801b783", "address": "fa:16:3e:81:5f:97", "network": {"id": "d75d99fb-ab3f-4215-8583-a34dda7db532", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1112664253", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5756d009-13ad-4e13-a991-3b5e71830aa5", "external-id": "nsx-vlan-transportzone-608", "segmentation_id": 608, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a9c8b81-0b", "ovs_interfaceid": "4a9c8b81-0ba5-4746-8695-2464b801b783", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.262999] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1592.262999] env[63371]: value = "task-1774270" [ 1592.262999] env[63371]: _type = "Task" [ 1592.262999] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.269680] env[63371]: DEBUG nova.compute.manager [req-a7b4751c-319a-4031-bb7d-cec9bda8896f req-d3588d6e-a274-40ce-a885-cdca2bd295cf service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Received event network-changed-4a9c8b81-0ba5-4746-8695-2464b801b783 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1592.269862] env[63371]: DEBUG nova.compute.manager [req-a7b4751c-319a-4031-bb7d-cec9bda8896f req-d3588d6e-a274-40ce-a885-cdca2bd295cf service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Refreshing instance network info cache due to event network-changed-4a9c8b81-0ba5-4746-8695-2464b801b783. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1592.270123] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7b4751c-319a-4031-bb7d-cec9bda8896f req-d3588d6e-a274-40ce-a885-cdca2bd295cf service nova] Acquiring lock "refresh_cache-3bd1c148-a48d-402c-bd76-2cb1d38b49f7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1592.273631] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774270, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.609825] env[63371]: DEBUG oslo_concurrency.lockutils [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 4.146s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.615951] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.252s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.616175] env[63371]: DEBUG nova.objects.instance [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63371) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1592.763663] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Releasing lock "refresh_cache-3bd1c148-a48d-402c-bd76-2cb1d38b49f7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1592.763663] env[63371]: DEBUG nova.compute.manager [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Instance network_info: |[{"id": "02145be4-05da-4b04-95ab-e7aa717efb9a", "address": "fa:16:3e:b9:30:33", "network": {"id": "f0bb68c8-94a0-4f5f-b54f-d0cdfce34659", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-353700507", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02145be4-05", "ovs_interfaceid": "02145be4-05da-4b04-95ab-e7aa717efb9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4a9c8b81-0ba5-4746-8695-2464b801b783", "address": "fa:16:3e:81:5f:97", "network": {"id": "d75d99fb-ab3f-4215-8583-a34dda7db532", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1112664253", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.129.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5756d009-13ad-4e13-a991-3b5e71830aa5", "external-id": "nsx-vlan-transportzone-608", "segmentation_id": 608, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a9c8b81-0b", "ovs_interfaceid": "4a9c8b81-0ba5-4746-8695-2464b801b783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1592.764090] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7b4751c-319a-4031-bb7d-cec9bda8896f req-d3588d6e-a274-40ce-a885-cdca2bd295cf service nova] Acquired lock "refresh_cache-3bd1c148-a48d-402c-bd76-2cb1d38b49f7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1592.764314] env[63371]: DEBUG nova.network.neutron [req-a7b4751c-319a-4031-bb7d-cec9bda8896f req-d3588d6e-a274-40ce-a885-cdca2bd295cf service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Refreshing network info cache for port 4a9c8b81-0ba5-4746-8695-2464b801b783 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1592.765565] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b9:30:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04ebd8af-aaf6-4d04-b869-3882e2571ed7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02145be4-05da-4b04-95ab-e7aa717efb9a', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:5f:97', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5756d009-13ad-4e13-a991-3b5e71830aa5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a9c8b81-0ba5-4746-8695-2464b801b783', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1592.776127] env[63371]: DEBUG oslo.service.loopingcall [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1592.780349] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1592.781019] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-83098ed7-d0d6-483a-af47-863dd495f593 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.803315] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774270, 'name': PowerOffVM_Task, 'duration_secs': 0.209304} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.804661] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1592.804891] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1592.805161] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1592.805161] env[63371]: value = "task-1774271" [ 1592.805161] env[63371]: _type = "Task" [ 1592.805161] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.805863] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4cb97c-b48a-453c-9777-dbbae8469b30 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.816466] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774271, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.818479] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1592.818710] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-506d99e3-9cb1-4320-8d73-45c17ddbba68 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.911947] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1592.912422] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1592.912743] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleting the datastore file [datastore1] 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1592.913072] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57a05447-980b-47e1-9811-bd45a0f37547 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.920316] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1592.920316] env[63371]: value = "task-1774273" [ 1592.920316] env[63371]: _type = "Task" [ 1592.920316] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.929099] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774273, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.322664] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774271, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.430591] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774273, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.475133} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.431014] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1593.431255] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1593.431491] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1593.549975] env[63371]: DEBUG nova.network.neutron [req-a7b4751c-319a-4031-bb7d-cec9bda8896f req-d3588d6e-a274-40ce-a885-cdca2bd295cf service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Updated VIF entry in instance network info cache for port 4a9c8b81-0ba5-4746-8695-2464b801b783. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1593.550453] env[63371]: DEBUG nova.network.neutron [req-a7b4751c-319a-4031-bb7d-cec9bda8896f req-d3588d6e-a274-40ce-a885-cdca2bd295cf service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Updating instance_info_cache with network_info: [{"id": "02145be4-05da-4b04-95ab-e7aa717efb9a", "address": "fa:16:3e:b9:30:33", "network": {"id": "f0bb68c8-94a0-4f5f-b54f-d0cdfce34659", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-353700507", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.47", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02145be4-05", "ovs_interfaceid": "02145be4-05da-4b04-95ab-e7aa717efb9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4a9c8b81-0ba5-4746-8695-2464b801b783", "address": "fa:16:3e:81:5f:97", "network": {"id": "d75d99fb-ab3f-4215-8583-a34dda7db532", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1112664253", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.116", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": 
"6919de51a2ef456db7a25d4cec1e26ad", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5756d009-13ad-4e13-a991-3b5e71830aa5", "external-id": "nsx-vlan-transportzone-608", "segmentation_id": 608, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a9c8b81-0b", "ovs_interfaceid": "4a9c8b81-0ba5-4746-8695-2464b801b783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1593.624892] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b98081fa-459c-448e-80fa-bd4c3e6cf849 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.626065] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.923s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1593.626328] env[63371]: DEBUG nova.objects.instance [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lazy-loading 'resources' on Instance uuid b880750e-7bf4-412c-bcff-eb2c343f60f0 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1593.819545] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774271, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.053944] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7b4751c-319a-4031-bb7d-cec9bda8896f req-d3588d6e-a274-40ce-a885-cdca2bd295cf service nova] Releasing lock "refresh_cache-3bd1c148-a48d-402c-bd76-2cb1d38b49f7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.149895] env[63371]: INFO nova.compute.manager [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Swapping old allocation on dict_keys(['c079ebb1-2fa2-4df9-bdab-118e305653c1']) held by migration 0d1a44d4-1ccf-4ed5-a60b-ac0e82931d09 for instance [ 1594.173867] env[63371]: DEBUG nova.scheduler.client.report [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Overwriting current allocation {'allocations': {'c079ebb1-2fa2-4df9-bdab-118e305653c1': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 94}}, 'project_id': 'c76a64c712ca4aa98c19600ef0469855', 'user_id': 'd6aa709a53564231ac25fb3e878239ab', 'consumer_generation': 1} on consumer 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f {{(pid=63371) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1594.246897] env[63371]: DEBUG oslo_concurrency.lockutils [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.247107] env[63371]: DEBUG oslo_concurrency.lockutils [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquired lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.247283] env[63371]: DEBUG nova.network.neutron [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1594.320079] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774271, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.463125] env[63371]: DEBUG nova.virt.hardware [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1594.463426] env[63371]: DEBUG nova.virt.hardware [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1594.463518] env[63371]: DEBUG nova.virt.hardware [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1594.463696] env[63371]: DEBUG nova.virt.hardware [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1594.463837] env[63371]: DEBUG nova.virt.hardware [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1594.464020] env[63371]: DEBUG nova.virt.hardware [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1594.464191] env[63371]: DEBUG nova.virt.hardware [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1594.464347] env[63371]: DEBUG nova.virt.hardware [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1594.464538] 
env[63371]: DEBUG nova.virt.hardware [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1594.464716] env[63371]: DEBUG nova.virt.hardware [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1594.464885] env[63371]: DEBUG nova.virt.hardware [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1594.465787] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f13fac-6106-41fd-ada0-4e065bb1d882 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.476433] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d769c66-d54b-437c-a736-4bda94c4f30f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.481347] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbd18ee-c120-4868-a1ca-7651ff917a93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.494939] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:3c:cc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a25f7a2e-b96f-4966-a665-76f86d05a00d', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1594.502579] env[63371]: DEBUG oslo.service.loopingcall [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1594.502853] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1594.503751] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35da8065-3d19-44cc-bab8-ff20124a9eca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.506652] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-167e10b1-0067-4bc3-be17-c923d9a5f830 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.637060] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee808ab-7633-4b90-9739-78abe9042f19 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.637060] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1594.637060] env[63371]: value = "task-1774274" [ 1594.637060] env[63371]: _type = "Task" [ 1594.637060] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.637060] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8154fbcf-a8d5-4f1b-a99e-8240d29f6f01 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.637060] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774274, 'name': CreateVM_Task} progress is 15%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.637060] env[63371]: DEBUG nova.compute.provider_tree [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1594.820249] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774271, 'name': CreateVM_Task, 'duration_secs': 1.789748} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.823105] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1594.824149] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.824331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.824702] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1594.825437] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0000d4a-6d70-4e33-9b5e-68c8ee86a316 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.831270] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1594.831270] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52860b2f-d70f-7d7e-c460-dd1c62a3e098" [ 1594.831270] env[63371]: _type = "Task" [ 1594.831270] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.840643] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52860b2f-d70f-7d7e-c460-dd1c62a3e098, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.966977] env[63371]: DEBUG nova.network.neutron [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance_info_cache with network_info: [{"id": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "address": "fa:16:3e:e7:1c:c5", "network": {"id": "8c47cf76-a3e6-4b39-b62e-77555091ae8a", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.181", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "494d32be2d74438a81d240ce9a488f98", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cbf3349e-d05e-4d44-a011-c4b6e41af988", "external-id": "nsx-vlan-transportzone-196", "segmentation_id": 196, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape13a7d6d-66", "ovs_interfaceid": "e13a7d6d-6643-4b64-a4b1-2a59397c5307", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.066353] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774274, 'name': CreateVM_Task, 'duration_secs': 0.363447} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.066524] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1595.067172] env[63371]: DEBUG oslo_concurrency.lockutils [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.106174] env[63371]: ERROR nova.scheduler.client.report [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [req-4cc93883-32b4-4563-913a-591c567b947a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4cc93883-32b4-4563-913a-591c567b947a"}]} [ 1595.121939] env[63371]: DEBUG nova.scheduler.client.report [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1595.133703] env[63371]: DEBUG nova.scheduler.client.report [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1595.133938] env[63371]: DEBUG nova.compute.provider_tree [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1595.145548] env[63371]: DEBUG nova.scheduler.client.report [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1595.161900] env[63371]: DEBUG nova.scheduler.client.report [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1595.341480] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52860b2f-d70f-7d7e-c460-dd1c62a3e098, 'name': SearchDatastore_Task, 'duration_secs': 0.01698} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.343867] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.344143] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1595.344381] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.344526] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.344701] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1595.345169] env[63371]: DEBUG oslo_concurrency.lockutils [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.345470] env[63371]: DEBUG oslo_concurrency.lockutils [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1595.345696] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17242775-9cec-4cc9-b88c-8659415d2757 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.347409] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6ca6a76-aa8b-4016-a325-09c92375abd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.352906] env[63371]: DEBUG oslo_vmware.api [None 
req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1595.352906] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ac441d-9c44-ed9c-903f-3160391d0d2a" [ 1595.352906] env[63371]: _type = "Task" [ 1595.352906] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.362592] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ac441d-9c44-ed9c-903f-3160391d0d2a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.426295] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1595.426490] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1595.429498] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b22dae2-e17f-4bc7-8668-5bf29cef4730 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.435770] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1595.435770] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522925f2-1b02-71f3-59eb-4b24b3dbca8a" [ 1595.435770] env[63371]: _type = "Task" [ 1595.435770] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.445843] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522925f2-1b02-71f3-59eb-4b24b3dbca8a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.469762] env[63371]: DEBUG oslo_concurrency.lockutils [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Releasing lock "refresh_cache-96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.470608] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1595.471024] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83ea766c-fc07-494f-a8f0-30daa1ecb871 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.478607] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1595.478607] env[63371]: value = "task-1774275" [ 1595.478607] env[63371]: _type = "Task" [ 1595.478607] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.487020] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774275, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.500580] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620393d2-876c-4587-878b-8493c50435fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.508075] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e18e19-c334-4baa-a89c-ae2f489ac725 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.539425] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6c7e19-6a59-4776-9e76-7643ca3da233 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.548613] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684a5a67-e082-4ecc-a301-6d77caa1cf05 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.563694] env[63371]: DEBUG nova.compute.provider_tree [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1595.864062] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ac441d-9c44-ed9c-903f-3160391d0d2a, 'name': SearchDatastore_Task, 'duration_secs': 0.085233} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.864062] env[63371]: DEBUG oslo_concurrency.lockutils [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1595.864247] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1595.864453] env[63371]: DEBUG oslo_concurrency.lockutils [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.946351] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522925f2-1b02-71f3-59eb-4b24b3dbca8a, 'name': SearchDatastore_Task, 'duration_secs': 0.016177} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.947114] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0c754f6-b288-4c4d-811b-4d6ed80e4107 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.952931] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1595.952931] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52af32f2-a02b-ceaf-f99e-74a0658516d8" [ 1595.952931] env[63371]: _type = "Task" [ 1595.952931] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1595.961065] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52af32f2-a02b-ceaf-f99e-74a0658516d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.988212] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774275, 'name': PowerOffVM_Task, 'duration_secs': 0.218504} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.988468] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1595.989134] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:32:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bb3da7ed-b700-420c-a825-23c0d1a3f881',id=26,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2130760861',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1595.989353] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1595.989508] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1595.989687] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1595.989829] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1595.989973] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1595.990190] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1595.990347] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 
tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1595.990513] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1595.990671] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1595.990840] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1595.995818] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34cc2537-3b27-4bb8-ad65-583c00ccb52b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.011406] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1596.011406] env[63371]: value = "task-1774276" [ 1596.011406] env[63371]: _type = "Task" [ 1596.011406] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.023421] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774276, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.095996] env[63371]: DEBUG nova.scheduler.client.report [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 95 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1596.096271] env[63371]: DEBUG nova.compute.provider_tree [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 95 to 96 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1596.096454] env[63371]: DEBUG nova.compute.provider_tree [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1596.464065] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52af32f2-a02b-ceaf-f99e-74a0658516d8, 'name': SearchDatastore_Task, 'duration_secs': 0.331079} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.464219] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.464476] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3bd1c148-a48d-402c-bd76-2cb1d38b49f7/3bd1c148-a48d-402c-bd76-2cb1d38b49f7.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1596.464752] env[63371]: DEBUG oslo_concurrency.lockutils [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1596.464934] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1596.465164] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a05600d-27c3-44b1-9fb0-ff7783be4b29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.467032] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b505ec86-738d-4ba0-aade-025de8e5630a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.474971] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1596.474971] env[63371]: value = "task-1774277" [ 1596.474971] env[63371]: _type = "Task" [ 1596.474971] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.476083] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1596.476253] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1596.479436] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d633f45-3fd3-46ec-9700-ea0b337f9a16 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.486372] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774277, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.487525] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1596.487525] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5217c90c-f375-a27f-6849-754b1156d245" [ 1596.487525] env[63371]: _type = "Task" [ 1596.487525] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.494966] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5217c90c-f375-a27f-6849-754b1156d245, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.521835] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774276, 'name': ReconfigVM_Task, 'duration_secs': 0.164306} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1596.522673] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e6834d-6e93-4f40-8a26-32aa195ef7b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.543880] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:32:54Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='bb3da7ed-b700-420c-a825-23c0d1a3f881',id=26,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2130760861',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1596.544141] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1596.544361] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1596.544608] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1596.544773] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1596.544923] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1596.545179] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1596.545368] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1596.545568] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1596.545779] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1596.545995] env[63371]: DEBUG nova.virt.hardware [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1596.546944] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03e68f5d-275a-42d9-9fc8-6c825c0f1866 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.554156] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1596.554156] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5228f40f-497b-066b-1413-71d9f62529b7" [ 1596.554156] env[63371]: _type = "Task" [ 1596.554156] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1596.563245] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5228f40f-497b-066b-1413-71d9f62529b7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.602490] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.976s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.605196] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.222s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.606888] env[63371]: INFO nova.compute.claims [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1596.626442] env[63371]: INFO nova.scheduler.client.report [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted allocations for instance b880750e-7bf4-412c-bcff-eb2c343f60f0 [ 1596.989344] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774277, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.001683] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5217c90c-f375-a27f-6849-754b1156d245, 'name': SearchDatastore_Task, 'duration_secs': 0.011161} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.002722] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05af8f73-1b01-4cf0-9860-e9a7a1ff5ed6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.011167] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1597.011167] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52af13e2-6029-9b9b-d01c-0ce315943dcc" [ 1597.011167] env[63371]: _type = "Task" [ 1597.011167] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.022097] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52af13e2-6029-9b9b-d01c-0ce315943dcc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.064058] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5228f40f-497b-066b-1413-71d9f62529b7, 'name': SearchDatastore_Task, 'duration_secs': 0.010312} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.069330] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfiguring VM instance instance-00000031 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1597.069612] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a270249-8eb9-4361-8a88-3bb601a3e712 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.088434] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1597.088434] env[63371]: value = "task-1774278" [ 1597.088434] env[63371]: _type = "Task" [ 1597.088434] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.096436] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774278, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.134665] env[63371]: DEBUG oslo_concurrency.lockutils [None req-89ba935e-e02a-454d-bf56-ee730db4c371 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "b880750e-7bf4-412c-bcff-eb2c343f60f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.765s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1597.487365] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774277, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.560118} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.487637] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3bd1c148-a48d-402c-bd76-2cb1d38b49f7/3bd1c148-a48d-402c-bd76-2cb1d38b49f7.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1597.487846] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1597.488195] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1c82459-8557-4de8-9b1e-f2dcfee4a4c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.495146] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1597.495146] env[63371]: value = "task-1774279" [ 1597.495146] env[63371]: _type = "Task" [ 1597.495146] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.503997] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774279, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.521483] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52af13e2-6029-9b9b-d01c-0ce315943dcc, 'name': SearchDatastore_Task, 'duration_secs': 0.015059} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.521761] env[63371]: DEBUG oslo_concurrency.lockutils [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1597.522025] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956/12e393d7-e8d5-4a9a-bad7-3cfffbb9d956.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1597.522289] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73c77574-6899-4c92-ae6c-5d12e0e97aae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.529533] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1597.529533] env[63371]: value = "task-1774280" [ 1597.529533] env[63371]: _type = "Task" [ 1597.529533] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.537602] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774280, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.599370] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774278, 'name': ReconfigVM_Task, 'duration_secs': 0.226746} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.599866] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfigured VM instance instance-00000031 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1597.600868] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c20e0f-f2c7-4bbf-9f09-f21ea63a36bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.627237] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1597.627900] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23eb6e2d-9e3f-4254-ba91-3f36b37c009b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.646861] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1597.646861] env[63371]: value = "task-1774281" [ 1597.646861] env[63371]: _type = "Task" [ 1597.646861] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.658682] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774281, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.006803] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774279, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074975} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.009591] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1598.010656] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c73d05-9ebb-4a69-a1b5-c689c17fe746 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.036198] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 3bd1c148-a48d-402c-bd76-2cb1d38b49f7/3bd1c148-a48d-402c-bd76-2cb1d38b49f7.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1598.038907] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7183be2-9cc6-4703-93f1-2d442c6af996 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.061581] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774280, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.495378} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.062370] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956/12e393d7-e8d5-4a9a-bad7-3cfffbb9d956.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1598.062634] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1598.062945] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1598.062945] env[63371]: value = "task-1774282" [ 1598.062945] env[63371]: _type = "Task" [ 1598.062945] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.065221] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc61bebe-656a-45a9-abdc-53782fbf1ea5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.074421] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774282, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.075586] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1598.075586] env[63371]: value = "task-1774283" [ 1598.075586] env[63371]: _type = "Task" [ 1598.075586] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.079571] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6674b1d3-38cb-4155-b96f-55dd999669a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.089135] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774283, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.090270] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c330f5-fe72-4486-ab37-45101ba94031 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.124332] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3a93af-e0f6-4401-803c-5518ca549b39 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.131970] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e38d2f0-7be7-40d7-b9b0-2a459ea60541 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.149465] env[63371]: DEBUG nova.compute.provider_tree [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1598.156225] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774281, 'name': ReconfigVM_Task, 'duration_secs': 0.336297} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.157137] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f/96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1598.158013] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4887ec6c-f7fd-4c43-9b7c-b4b9068a303b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.180245] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36fa974f-8a5f-489a-8e43-3ec9ef2a02c1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.197853] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c266a85-42aa-4e10-8803-8ac775283b53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.218855] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d7fdd5-f5ab-4d19-a460-e46dee9472cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.226120] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1598.226398] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-805295f5-e56c-4c86-ba66-436548f9364c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.233133] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1598.233133] env[63371]: value = "task-1774284" [ 1598.233133] env[63371]: _type = "Task" [ 1598.233133] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.240973] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774284, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.580903] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774282, 'name': ReconfigVM_Task, 'duration_secs': 0.29723} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.584782] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 3bd1c148-a48d-402c-bd76-2cb1d38b49f7/3bd1c148-a48d-402c-bd76-2cb1d38b49f7.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1598.585537] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51c500e6-9099-45d4-94e0-96be3bf7a428 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.593589] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774283, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068908} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.595210] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1598.595661] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1598.595661] env[63371]: value = "task-1774285" [ 1598.595661] env[63371]: _type = "Task" [ 1598.595661] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.597102] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-444c810a-5116-4714-809f-3a263557c953 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.625603] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774285, 'name': Rename_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.635756] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956/12e393d7-e8d5-4a9a-bad7-3cfffbb9d956.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1598.636184] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15c4404f-5ca5-4851-968b-ff4130f56a77 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.654048] env[63371]: DEBUG nova.scheduler.client.report [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1598.663194] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1598.663194] env[63371]: value = "task-1774286" [ 1598.663194] env[63371]: _type = "Task" [ 1598.663194] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.671684] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774286, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.744615] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774284, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.770635] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "40644960-1400-4dc6-9f2b-78afb7492a8d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.770871] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "40644960-1400-4dc6-9f2b-78afb7492a8d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1599.110216] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774285, 'name': Rename_Task, 'duration_secs': 0.166632} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.110514] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1599.110763] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-017946ef-9e23-4d7c-9d1d-1af66bb20e0d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.117506] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1599.117506] env[63371]: value = "task-1774287" [ 1599.117506] env[63371]: _type = "Task" [ 1599.117506] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.124600] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774287, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.159071] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.554s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1599.159961] env[63371]: DEBUG nova.compute.manager [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1599.163033] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.959s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1599.164243] env[63371]: INFO nova.compute.claims [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1599.176141] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774286, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.243795] env[63371]: DEBUG oslo_vmware.api [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774284, 'name': PowerOnVM_Task, 'duration_secs': 0.561016} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.243934] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1599.273296] env[63371]: DEBUG nova.compute.manager [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1599.627643] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774287, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.670934] env[63371]: DEBUG nova.compute.utils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1599.672384] env[63371]: DEBUG nova.compute.manager [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1599.672547] env[63371]: DEBUG nova.network.neutron [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1599.683277] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774286, 'name': ReconfigVM_Task, 'duration_secs': 0.852304} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.683529] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956/12e393d7-e8d5-4a9a-bad7-3cfffbb9d956.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1599.684118] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95f12fdb-bf1c-4595-9e02-08eba3d31e37 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.690500] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1599.690500] env[63371]: value = "task-1774288" [ 1599.690500] env[63371]: _type = "Task" [ 1599.690500] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.700458] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774288, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.756128] env[63371]: DEBUG nova.policy [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4352d55b3e7b400281f28dc09b2676f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ab55ffce414a461aa6f77a83100b7346', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1599.800099] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.053667] env[63371]: DEBUG nova.network.neutron [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Successfully created port: b12875f1-bdc7-4980-9223-c0a2fee47b86 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1600.128521] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774287, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.178348] env[63371]: DEBUG nova.compute.manager [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1600.200456] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774288, 'name': Rename_Task, 'duration_secs': 0.174268} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.200888] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1600.200989] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c61e8be7-0d0d-4904-9e0c-b3ecb4e23c6b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.207607] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1600.207607] env[63371]: value = "task-1774289" [ 1600.207607] env[63371]: _type = "Task" [ 1600.207607] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1600.226329] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774289, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.255309] env[63371]: INFO nova.compute.manager [None req-25bb1683-c78c-4e1f-a8e0-a532ef3812ff tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance to original state: 'active' [ 1600.565534] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bcc79c1-b4c1-485d-841a-5847183460e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.574080] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5e2249-9c4f-4529-8673-19c022351beb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.604625] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118d446c-02fd-4fa1-aaa9-131d4988c8cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.612453] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a279ad-6322-4721-8870-6759a2026c0b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.626146] env[63371]: DEBUG nova.compute.provider_tree [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1600.635239] env[63371]: DEBUG oslo_vmware.api [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774287, 'name': 
PowerOnVM_Task, 'duration_secs': 1.109789} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.636140] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1600.636344] env[63371]: INFO nova.compute.manager [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Took 13.15 seconds to spawn the instance on the hypervisor. [ 1600.636526] env[63371]: DEBUG nova.compute.manager [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1600.637368] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd51c1e-98f7-40d1-af9c-d36957b47052 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.719938] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774289, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.131513] env[63371]: DEBUG nova.scheduler.client.report [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1601.155849] env[63371]: INFO nova.compute.manager [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Took 53.61 seconds to build instance. [ 1601.188210] env[63371]: DEBUG nova.compute.manager [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1601.214377] env[63371]: DEBUG nova.virt.hardware [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1601.214659] env[63371]: DEBUG nova.virt.hardware [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1601.214818] env[63371]: DEBUG nova.virt.hardware [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1601.214998] env[63371]: DEBUG nova.virt.hardware [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1601.215157] env[63371]: DEBUG nova.virt.hardware [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1601.215300] env[63371]: DEBUG nova.virt.hardware [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1601.215520] env[63371]: DEBUG nova.virt.hardware [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1601.215694] env[63371]: DEBUG nova.virt.hardware [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1601.215882] env[63371]: DEBUG nova.virt.hardware [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 
tempest-ServersTestJSON-1299687012-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1601.216057] env[63371]: DEBUG nova.virt.hardware [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1601.216230] env[63371]: DEBUG nova.virt.hardware [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1601.216995] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec903aa-40ea-46d7-b91c-586d9af68887 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.227214] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4840c7db-1ee5-496b-bf15-7241ebb8c191 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.230841] env[63371]: DEBUG oslo_vmware.api [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774289, 'name': PowerOnVM_Task, 'duration_secs': 0.59488} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.231421] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1601.231636] env[63371]: DEBUG nova.compute.manager [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1601.232664] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f8162f-8ba8-4a3b-be4f-7cce930e27d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.395552] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.395825] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.396046] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.396240] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.396409] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.398689] env[63371]: INFO nova.compute.manager [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Terminating instance [ 1601.402665] env[63371]: DEBUG nova.compute.manager [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1601.404485] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1601.405391] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad7382d-024d-498f-b752-7fee5616cd81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.415069] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1601.415311] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61c0dfc3-0262-4075-84e9-6f8c3faad58b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.421713] env[63371]: DEBUG oslo_vmware.api [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1601.421713] env[63371]: value = "task-1774290" [ 1601.421713] env[63371]: _type = "Task" [ 1601.421713] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.431249] env[63371]: DEBUG oslo_vmware.api [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774290, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.552253] env[63371]: DEBUG nova.compute.manager [req-bdce2068-cc15-4287-b2f4-d67c3018a4fd req-69d43fb6-cd88-43bc-b366-e6d070ef1209 service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Received event network-vif-plugged-b12875f1-bdc7-4980-9223-c0a2fee47b86 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1601.552506] env[63371]: DEBUG oslo_concurrency.lockutils [req-bdce2068-cc15-4287-b2f4-d67c3018a4fd req-69d43fb6-cd88-43bc-b366-e6d070ef1209 service nova] Acquiring lock "44a392e4-32c1-4aaf-8dc0-7df50c1a28c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.552691] env[63371]: DEBUG oslo_concurrency.lockutils [req-bdce2068-cc15-4287-b2f4-d67c3018a4fd req-69d43fb6-cd88-43bc-b366-e6d070ef1209 service nova] Lock "44a392e4-32c1-4aaf-8dc0-7df50c1a28c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.552866] env[63371]: DEBUG oslo_concurrency.lockutils [req-bdce2068-cc15-4287-b2f4-d67c3018a4fd req-69d43fb6-cd88-43bc-b366-e6d070ef1209 service nova] Lock "44a392e4-32c1-4aaf-8dc0-7df50c1a28c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.553094] env[63371]: DEBUG nova.compute.manager [req-bdce2068-cc15-4287-b2f4-d67c3018a4fd req-69d43fb6-cd88-43bc-b366-e6d070ef1209 service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] No waiting events found dispatching network-vif-plugged-b12875f1-bdc7-4980-9223-c0a2fee47b86 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1601.553260] env[63371]: WARNING nova.compute.manager [req-bdce2068-cc15-4287-b2f4-d67c3018a4fd req-69d43fb6-cd88-43bc-b366-e6d070ef1209 service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Received unexpected event network-vif-plugged-b12875f1-bdc7-4980-9223-c0a2fee47b86 for instance with vm_state building and task_state spawning. 
[ 1601.635871] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.636604] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.474s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.637102] env[63371]: DEBUG nova.compute.manager [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1601.639922] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.416s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.640650] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.640650] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1601.640650] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.726s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.640787] env[63371]: DEBUG nova.objects.instance [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lazy-loading 'resources' on Instance uuid 1c93487b-6d8f-424d-8b95-10bfb894c609 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1601.643637] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6c4fec-9c80-46b5-8b05-2d23b8cf975f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.647657] env[63371]: DEBUG nova.network.neutron [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 
44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Successfully updated port: b12875f1-bdc7-4980-9223-c0a2fee47b86 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1601.654712] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1427ce79-2e8c-439c-8b3f-c969d1bc23a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.661341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8af7e2eb-2c94-4ac4-9b84-2187a119b9f4 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.213s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.661341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.025s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.661341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.661341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1601.661341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1601.667909] env[63371]: INFO nova.compute.manager [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Terminating instance [ 1601.678273] env[63371]: DEBUG nova.compute.manager [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1601.678273] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1601.679095] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8568f73-9593-4621-bf1b-e8dac7f6acb6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.683117] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d8a189-50d5-4cf1-b124-049b74b3f7fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.692821] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdec2013-f220-4ce7-a016-f138e924e992 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.695993] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1601.696736] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1212e79c-71c4-497f-8d96-5e57cc01f881 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.703024] env[63371]: DEBUG oslo_vmware.api [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1601.703024] env[63371]: value = "task-1774291" [ 1601.703024] env[63371]: _type = "Task" [ 1601.703024] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.731163] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178539MB free_disk=163GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1601.731338] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.741470] env[63371]: DEBUG oslo_vmware.api [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774291, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.759173] env[63371]: DEBUG oslo_concurrency.lockutils [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.932027] env[63371]: DEBUG oslo_vmware.api [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774290, 'name': PowerOffVM_Task, 'duration_secs': 0.424959} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.932148] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1601.932315] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1601.932680] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-227a2b61-09e5-40c0-8bdb-8a3b628c687a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.019557] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1602.019667] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1602.020089] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Deleting the datastore file [datastore1] 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1602.020089] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b49a5a61-d3c6-4cf6-a3d4-bd5ac7460acc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.026877] env[63371]: DEBUG oslo_vmware.api [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1602.026877] env[63371]: value = "task-1774293" [ 1602.026877] 
env[63371]: _type = "Task" [ 1602.026877] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.037142] env[63371]: DEBUG oslo_vmware.api [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774293, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.111895] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.112280] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.112530] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.112722] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.112914] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1602.115125] env[63371]: INFO nova.compute.manager [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Terminating instance [ 1602.116904] env[63371]: DEBUG nova.compute.manager [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1602.117124] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1602.117964] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c161c4c-266f-49de-b84b-ebdc4d01642a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.125564] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1602.125807] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca8c93b2-83b9-4d04-a7ac-3dc6fd3271c7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.132122] env[63371]: DEBUG oslo_vmware.api [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1602.132122] env[63371]: value = "task-1774294" [ 1602.132122] env[63371]: _type = "Task" [ 1602.132122] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.140123] env[63371]: DEBUG oslo_vmware.api [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774294, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.143598] env[63371]: DEBUG nova.compute.utils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1602.145143] env[63371]: DEBUG nova.compute.manager [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1602.145362] env[63371]: DEBUG nova.network.neutron [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1602.150503] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Acquiring lock "refresh_cache-44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.150503] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Acquired lock "refresh_cache-44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.150639] env[63371]: DEBUG nova.network.neutron [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1602.198821] env[63371]: DEBUG nova.policy [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b57536ed1e3e48cf86a8ec224a0aa3d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22fd1634f21c45efa8606cf6c339a790', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1602.241115] env[63371]: DEBUG oslo_vmware.api [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774291, 'name': PowerOffVM_Task, 'duration_secs': 0.242674} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.244026] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1602.244151] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1602.244618] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f75d33dd-f9d3-429e-95c1-12f24b366f4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.435698] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1602.435698] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1602.435698] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Deleting the datastore file [datastore1] 3bd1c148-a48d-402c-bd76-2cb1d38b49f7 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1602.435896] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a3fd7ba-b6ec-49e6-b898-1bdf3b465a17 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.445702] env[63371]: DEBUG oslo_vmware.api [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for the task: (returnval){ [ 1602.445702] env[63371]: value = "task-1774296" [ 1602.445702] env[63371]: _type = "Task" [ 1602.445702] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.453322] env[63371]: DEBUG oslo_vmware.api [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774296, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.539611] env[63371]: DEBUG oslo_vmware.api [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774293, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.272049} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.540421] env[63371]: DEBUG nova.network.neutron [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Successfully created port: df0bcb02-c7dd-42fe-96f3-e45841fdf782 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1602.542287] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1602.542633] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1602.542736] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1602.542923] env[63371]: INFO nova.compute.manager [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1602.543206] env[63371]: DEBUG oslo.service.loopingcall [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1602.543514] env[63371]: DEBUG nova.compute.manager [-] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1602.543514] env[63371]: DEBUG nova.network.neutron [-] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1602.575689] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0197ff54-33cb-4e57-a9a8-21c19e728455 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.586029] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a8d8b8-35be-4810-8175-8f64badb1094 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.620539] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8635f539-25ff-4127-8c9c-1bf680a06273 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.626885] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1602.627152] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1602.648223] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c30f956-4c2b-4edb-9d68-16a3a5b36e26 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.658140] env[63371]: DEBUG nova.compute.manager [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1602.665055] env[63371]: DEBUG oslo_vmware.api [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774294, 'name': PowerOffVM_Task, 'duration_secs': 0.203503} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.665055] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1602.665055] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1602.665299] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70b5e188-a0a9-4b1e-a59d-2ce26bd007e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.676630] env[63371]: DEBUG nova.compute.provider_tree [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1602.705044] env[63371]: DEBUG nova.network.neutron [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1602.753018] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1602.753018] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1602.753018] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleting the datastore file [datastore1] 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1602.753018] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc6d14c3-9c2e-4833-9af4-52201f6a50d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.758689] env[63371]: DEBUG oslo_vmware.api [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1602.758689] env[63371]: value = "task-1774298" [ 1602.758689] env[63371]: _type = "Task" [ 
1602.758689] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.767263] env[63371]: DEBUG oslo_vmware.api [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774298, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.856532] env[63371]: DEBUG nova.compute.manager [req-c8272250-a36c-428d-8fb6-a3f50317d909 req-4b36e899-c0cb-4634-933a-5479661e6c3f service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Received event network-vif-deleted-e13a7d6d-6643-4b64-a4b1-2a59397c5307 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1602.856736] env[63371]: INFO nova.compute.manager [req-c8272250-a36c-428d-8fb6-a3f50317d909 req-4b36e899-c0cb-4634-933a-5479661e6c3f service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Neutron deleted interface e13a7d6d-6643-4b64-a4b1-2a59397c5307; detaching it from the instance and deleting it from the info cache [ 1602.856904] env[63371]: DEBUG nova.network.neutron [req-c8272250-a36c-428d-8fb6-a3f50317d909 req-4b36e899-c0cb-4634-933a-5479661e6c3f service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.955152] env[63371]: DEBUG oslo_vmware.api [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Task: {'id': task-1774296, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203647} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.955439] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1602.955626] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1602.955889] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1602.956104] env[63371]: INFO nova.compute.manager [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Took 1.28 seconds to destroy the instance on the hypervisor. 
[ 1602.956372] env[63371]: DEBUG oslo.service.loopingcall [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1602.956571] env[63371]: DEBUG nova.compute.manager [-] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1602.956664] env[63371]: DEBUG nova.network.neutron [-] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1602.961229] env[63371]: DEBUG nova.network.neutron [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Updating instance_info_cache with network_info: [{"id": "b12875f1-bdc7-4980-9223-c0a2fee47b86", "address": "fa:16:3e:e0:2e:e7", "network": {"id": "ec9bd35d-8a6f-4ed0-83c5-18c1ba123bd4", "bridge": "br-int", "label": "tempest-ServersTestJSON-1570224473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab55ffce414a461aa6f77a83100b7346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb12875f1-bd", "ovs_interfaceid": "b12875f1-bdc7-4980-9223-c0a2fee47b86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.133895] env[63371]: DEBUG nova.compute.manager [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1603.182212] env[63371]: DEBUG nova.scheduler.client.report [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1603.268360] env[63371]: DEBUG oslo_vmware.api [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774298, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206289} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.268613] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1603.268799] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1603.268973] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1603.269165] env[63371]: INFO nova.compute.manager [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1603.269407] env[63371]: DEBUG oslo.service.loopingcall [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1603.269589] env[63371]: DEBUG nova.compute.manager [-] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1603.269683] env[63371]: DEBUG nova.network.neutron [-] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1603.329181] env[63371]: DEBUG nova.network.neutron [-] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.362018] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ae4c437-dd2f-4959-acfb-a5d0233d8492 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.375901] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7a7825-04ea-4ad3-8f6e-e0697cedeecc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.412304] env[63371]: DEBUG nova.compute.manager [req-c8272250-a36c-428d-8fb6-a3f50317d909 req-4b36e899-c0cb-4634-933a-5479661e6c3f service nova] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Detach interface failed, port_id=e13a7d6d-6643-4b64-a4b1-2a59397c5307, reason: Instance 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1603.464148] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Releasing lock "refresh_cache-44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1603.464489] env[63371]: DEBUG nova.compute.manager [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Instance network_info: |[{"id": "b12875f1-bdc7-4980-9223-c0a2fee47b86", "address": "fa:16:3e:e0:2e:e7", "network": {"id": "ec9bd35d-8a6f-4ed0-83c5-18c1ba123bd4", "bridge": "br-int", "label": "tempest-ServersTestJSON-1570224473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab55ffce414a461aa6f77a83100b7346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb12875f1-bd", "ovs_interfaceid": "b12875f1-bdc7-4980-9223-c0a2fee47b86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": 
true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1603.464926] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e0:2e:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a9abd00f-2cea-40f8-9804-a56b6431192d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b12875f1-bdc7-4980-9223-c0a2fee47b86', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1603.473510] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Creating folder: Project (ab55ffce414a461aa6f77a83100b7346). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1603.473792] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a85ff69a-ddc7-41cb-baca-6affedc62750 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.485661] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Created folder: Project (ab55ffce414a461aa6f77a83100b7346) in parent group-v368199. [ 1603.485800] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Creating folder: Instances. Parent ref: group-v368392. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1603.486050] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a9ed542b-e48b-4a38-99a8-36e54da3b1c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.496043] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Created folder: Instances in parent group-v368392. [ 1603.496440] env[63371]: DEBUG oslo.service.loopingcall [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1603.496721] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1603.497036] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-40b277e0-df1e-47f7-95f9-4af072eb62a3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.526102] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1603.526102] env[63371]: value = "task-1774301" [ 1603.526102] env[63371]: _type = "Task" [ 1603.526102] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.534403] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774301, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.584470] env[63371]: DEBUG nova.compute.manager [req-269c504c-a1d3-442e-85fd-4b0058856465 req-3185c435-b980-4c61-9da1-388f24363221 service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Received event network-changed-b12875f1-bdc7-4980-9223-c0a2fee47b86 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1603.584707] env[63371]: DEBUG nova.compute.manager [req-269c504c-a1d3-442e-85fd-4b0058856465 req-3185c435-b980-4c61-9da1-388f24363221 service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Refreshing instance network info cache due to event network-changed-b12875f1-bdc7-4980-9223-c0a2fee47b86. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1603.584935] env[63371]: DEBUG oslo_concurrency.lockutils [req-269c504c-a1d3-442e-85fd-4b0058856465 req-3185c435-b980-4c61-9da1-388f24363221 service nova] Acquiring lock "refresh_cache-44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1603.585098] env[63371]: DEBUG oslo_concurrency.lockutils [req-269c504c-a1d3-442e-85fd-4b0058856465 req-3185c435-b980-4c61-9da1-388f24363221 service nova] Acquired lock "refresh_cache-44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1603.585339] env[63371]: DEBUG nova.network.neutron [req-269c504c-a1d3-442e-85fd-4b0058856465 req-3185c435-b980-4c61-9da1-388f24363221 service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Refreshing network info cache for port b12875f1-bdc7-4980-9223-c0a2fee47b86 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1603.655448] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.667496] env[63371]: DEBUG nova.compute.manager [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1603.688463] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.048s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.692678] env[63371]: DEBUG nova.virt.hardware [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1603.692951] env[63371]: DEBUG nova.virt.hardware [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1603.693139] env[63371]: DEBUG nova.virt.hardware [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1603.693385] env[63371]: DEBUG nova.virt.hardware [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1603.693598] env[63371]: DEBUG nova.virt.hardware [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1603.693786] env[63371]: DEBUG nova.virt.hardware [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1603.694049] env[63371]: DEBUG nova.virt.hardware [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1603.694219] env[63371]: DEBUG nova.virt.hardware [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1603.694406] env[63371]: DEBUG nova.virt.hardware [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1603.694585] env[63371]: DEBUG nova.virt.hardware [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1603.694788] env[63371]: DEBUG nova.virt.hardware [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1603.695598] env[63371]: DEBUG oslo_concurrency.lockutils [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.295s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.695598] env[63371]: DEBUG nova.objects.instance [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lazy-loading 'resources' on Instance uuid e05c7187-b4d6-481e-8bce-deb557dde6a8 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1603.697534] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a397b83-1ed5-41e0-b941-9d48b38ccc3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.707628] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1be1b50-1797-4569-8a4b-9e5d714f12f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.716053] env[63371]: INFO nova.scheduler.client.report [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleted allocations for instance 1c93487b-6d8f-424d-8b95-10bfb894c609 [ 1603.833381] env[63371]: INFO nova.compute.manager [-] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Took 1.29 seconds to deallocate network for instance. 
[ 1603.874225] env[63371]: DEBUG nova.network.neutron [-] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.022443] env[63371]: DEBUG nova.network.neutron [-] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.035179] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774301, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.232925] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d1944b76-0baf-4b9d-8de9-3c9cd669cc18 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "1c93487b-6d8f-424d-8b95-10bfb894c609" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.378s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1604.340481] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.377987] env[63371]: INFO nova.compute.manager [-] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Took 1.42 seconds to deallocate network for instance. [ 1604.439651] env[63371]: DEBUG nova.network.neutron [req-269c504c-a1d3-442e-85fd-4b0058856465 req-3185c435-b980-4c61-9da1-388f24363221 service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Updated VIF entry in instance network info cache for port b12875f1-bdc7-4980-9223-c0a2fee47b86. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1604.440254] env[63371]: DEBUG nova.network.neutron [req-269c504c-a1d3-442e-85fd-4b0058856465 req-3185c435-b980-4c61-9da1-388f24363221 service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Updating instance_info_cache with network_info: [{"id": "b12875f1-bdc7-4980-9223-c0a2fee47b86", "address": "fa:16:3e:e0:2e:e7", "network": {"id": "ec9bd35d-8a6f-4ed0-83c5-18c1ba123bd4", "bridge": "br-int", "label": "tempest-ServersTestJSON-1570224473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab55ffce414a461aa6f77a83100b7346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb12875f1-bd", "ovs_interfaceid": "b12875f1-bdc7-4980-9223-c0a2fee47b86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.460713] env[63371]: DEBUG nova.network.neutron [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Successfully updated port: df0bcb02-c7dd-42fe-96f3-e45841fdf782 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1604.525563] env[63371]: INFO nova.compute.manager [-] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Took 1.26 seconds to deallocate network for instance. [ 1604.541631] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774301, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.617123] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec446f8-2aa9-465a-9df8-b3951b8eb145 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.624416] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edaaa76e-d815-4893-800f-074338a09a4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.653754] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550ea14b-6269-44e4-bc31-cf204a47f72e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.660630] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcd077d0-e8aa-4888-99a5-487c0a5b613e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.674626] env[63371]: DEBUG nova.compute.provider_tree [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1604.886303] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1604.904666] env[63371]: DEBUG nova.compute.manager [req-7b1a0970-6943-466f-8a09-1c834ad2b7e0 req-1a801532-6da0-4160-8fdd-50cb5d5991a7 service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Received event network-vif-deleted-4a9c8b81-0ba5-4746-8695-2464b801b783 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1604.904879] env[63371]: DEBUG nova.compute.manager [req-7b1a0970-6943-466f-8a09-1c834ad2b7e0 req-1a801532-6da0-4160-8fdd-50cb5d5991a7 service nova] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Received event network-vif-deleted-02145be4-05da-4b04-95ab-e7aa717efb9a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1604.945799] env[63371]: DEBUG oslo_concurrency.lockutils [req-269c504c-a1d3-442e-85fd-4b0058856465 req-3185c435-b980-4c61-9da1-388f24363221 service nova] Releasing lock "refresh_cache-44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1604.946091] env[63371]: DEBUG nova.compute.manager [req-269c504c-a1d3-442e-85fd-4b0058856465 req-3185c435-b980-4c61-9da1-388f24363221 service nova] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Received event network-vif-deleted-a25f7a2e-b96f-4966-a665-76f86d05a00d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1604.946271] env[63371]: INFO nova.compute.manager [req-269c504c-a1d3-442e-85fd-4b0058856465 req-3185c435-b980-4c61-9da1-388f24363221 service nova] [instance: 
12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Neutron deleted interface a25f7a2e-b96f-4966-a665-76f86d05a00d; detaching it from the instance and deleting it from the info cache [ 1604.946441] env[63371]: DEBUG nova.network.neutron [req-269c504c-a1d3-442e-85fd-4b0058856465 req-3185c435-b980-4c61-9da1-388f24363221 service nova] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1604.964500] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "refresh_cache-fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1604.965154] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquired lock "refresh_cache-fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1604.965154] env[63371]: DEBUG nova.network.neutron [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1605.037407] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.037527] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774301, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.177985] env[63371]: DEBUG nova.scheduler.client.report [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1605.212101] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.212439] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.212724] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.212909] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.213085] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.215394] env[63371]: INFO nova.compute.manager [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Terminating instance [ 1605.217012] env[63371]: DEBUG nova.compute.manager [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 
tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1605.217227] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1605.218044] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a28bec3b-5fb4-4ca6-b877-cc5b7707922a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.226451] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1605.226451] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-105533bf-7432-4af6-9376-3ff5453fb44a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.233059] env[63371]: DEBUG oslo_vmware.api [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1605.233059] env[63371]: value = "task-1774302" [ 1605.233059] env[63371]: _type = "Task" [ 1605.233059] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.240710] env[63371]: DEBUG oslo_vmware.api [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774302, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.449313] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-597abf46-e740-468f-ad1d-13f5c802f5c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.458281] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d84a389-7b4c-4722-86f2-3fc723b907c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.495144] env[63371]: DEBUG nova.compute.manager [req-269c504c-a1d3-442e-85fd-4b0058856465 req-3185c435-b980-4c61-9da1-388f24363221 service nova] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Detach interface failed, port_id=a25f7a2e-b96f-4966-a665-76f86d05a00d, reason: Instance 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956 could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1605.500942] env[63371]: DEBUG nova.network.neutron [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1605.537688] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774301, 'name': CreateVM_Task, 'duration_secs': 1.519965} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.537688] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1605.538107] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1605.538284] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1605.538607] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1605.538861] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22c9eb52-3b74-463f-804c-39a4fb6b8fea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.544031] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Waiting for the task: (returnval){ [ 1605.544031] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ff0404-30e3-4f07-9ae2-9dbefbccdcdd" [ 1605.544031] env[63371]: _type = "Task" [ 1605.544031] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.551747] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ff0404-30e3-4f07-9ae2-9dbefbccdcdd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.683141] env[63371]: DEBUG oslo_concurrency.lockutils [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.988s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.686250] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.731s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.687047] env[63371]: INFO nova.compute.claims [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1605.706210] env[63371]: INFO nova.scheduler.client.report [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Deleted allocations for instance e05c7187-b4d6-481e-8bce-deb557dde6a8 [ 1605.733296] env[63371]: DEBUG nova.compute.manager [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Received event network-vif-plugged-df0bcb02-c7dd-42fe-96f3-e45841fdf782 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1605.733296] env[63371]: DEBUG oslo_concurrency.lockutils [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] Acquiring lock "fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1605.733296] env[63371]: DEBUG oslo_concurrency.lockutils [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] Lock "fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1605.733296] env[63371]: DEBUG oslo_concurrency.lockutils [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] Lock "fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.733296] env[63371]: DEBUG nova.compute.manager [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] No waiting events found dispatching network-vif-plugged-df0bcb02-c7dd-42fe-96f3-e45841fdf782 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1605.733296] 
env[63371]: WARNING nova.compute.manager [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Received unexpected event network-vif-plugged-df0bcb02-c7dd-42fe-96f3-e45841fdf782 for instance with vm_state building and task_state spawning. [ 1605.733296] env[63371]: DEBUG nova.compute.manager [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Received event network-changed-df0bcb02-c7dd-42fe-96f3-e45841fdf782 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1605.733296] env[63371]: DEBUG nova.compute.manager [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Refreshing instance network info cache due to event network-changed-df0bcb02-c7dd-42fe-96f3-e45841fdf782. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1605.733296] env[63371]: DEBUG oslo_concurrency.lockutils [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] Acquiring lock "refresh_cache-fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1605.743077] env[63371]: DEBUG oslo_vmware.api [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774302, 'name': PowerOffVM_Task, 'duration_secs': 0.200575} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.745569] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1605.745766] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1605.746270] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-63a3300d-a04a-422c-b92a-6be62ffb7a04 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.827303] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1605.827498] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1605.827760] env[63371]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleting the datastore file [datastore1] e6cd62ce-f6d2-4e5b-acbc-7527a94e0932 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1605.828073] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d34fb78-5bf7-468e-97b8-26ddfd8d984e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.834157] env[63371]: DEBUG oslo_vmware.api [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1605.834157] env[63371]: value = "task-1774304" [ 1605.834157] env[63371]: _type = "Task" [ 1605.834157] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1605.843200] env[63371]: DEBUG oslo_vmware.api [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774304, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.853326] env[63371]: DEBUG nova.network.neutron [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Updating instance_info_cache with network_info: [{"id": "df0bcb02-c7dd-42fe-96f3-e45841fdf782", "address": "fa:16:3e:bc:47:fd", "network": {"id": "1dc1e53b-b865-4642-b667-e771524c6438", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-562533890-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fd1634f21c45efa8606cf6c339a790", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0bcb02-c7", "ovs_interfaceid": "df0bcb02-c7dd-42fe-96f3-e45841fdf782", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1606.054224] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ff0404-30e3-4f07-9ae2-9dbefbccdcdd, 'name': SearchDatastore_Task, 'duration_secs': 0.010097} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.054538] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.054773] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1606.055015] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.055168] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.055343] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1606.055604] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-051503e3-1a2a-4ca8-8711-b63c69f13d0f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.064153] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1606.064153] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1606.065472] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e9fdd5c-4235-476c-aef6-ca899e57398b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.071035] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Waiting for the task: (returnval){ [ 1606.071035] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5266a1a0-39b1-d41f-3cb8-85749587a33b" [ 1606.071035] env[63371]: _type = "Task" [ 1606.071035] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.079122] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5266a1a0-39b1-d41f-3cb8-85749587a33b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.213063] env[63371]: DEBUG oslo_concurrency.lockutils [None req-19eab50e-440f-4ae8-91f4-f74602477065 tempest-InstanceActionsV221TestJSON-2096132979 tempest-InstanceActionsV221TestJSON-2096132979-project-member] Lock "e05c7187-b4d6-481e-8bce-deb557dde6a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.759s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1606.344195] env[63371]: DEBUG oslo_vmware.api [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774304, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123951} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.344464] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1606.344651] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1606.344826] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1606.345000] env[63371]: INFO nova.compute.manager [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1606.345257] env[63371]: DEBUG oslo.service.loopingcall [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1606.345448] env[63371]: DEBUG nova.compute.manager [-] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1606.345545] env[63371]: DEBUG nova.network.neutron [-] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1606.355897] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Releasing lock "refresh_cache-fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.356208] env[63371]: DEBUG nova.compute.manager [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Instance network_info: |[{"id": "df0bcb02-c7dd-42fe-96f3-e45841fdf782", "address": "fa:16:3e:bc:47:fd", "network": {"id": "1dc1e53b-b865-4642-b667-e771524c6438", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-562533890-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fd1634f21c45efa8606cf6c339a790", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0bcb02-c7", "ovs_interfaceid": "df0bcb02-c7dd-42fe-96f3-e45841fdf782", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1606.356481] env[63371]: DEBUG oslo_concurrency.lockutils [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] Acquired lock "refresh_cache-fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.356657] env[63371]: DEBUG nova.network.neutron [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Refreshing network info cache for port df0bcb02-c7dd-42fe-96f3-e45841fdf782 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1606.357814] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None 
req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:47:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d2e4070-a78e-4d08-a104-b6312ab65577', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df0bcb02-c7dd-42fe-96f3-e45841fdf782', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1606.366066] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Creating folder: Project (22fd1634f21c45efa8606cf6c339a790). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1606.366838] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e917fb8-51a2-4f23-bdf8-b1bfa5da8df5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.378302] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Created folder: Project (22fd1634f21c45efa8606cf6c339a790) in parent group-v368199. [ 1606.378494] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Creating folder: Instances. Parent ref: group-v368395. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1606.378725] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-982ec5a8-a364-46bf-b912-8ef11cb20609 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.389554] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Created folder: Instances in parent group-v368395. [ 1606.389783] env[63371]: DEBUG oslo.service.loopingcall [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1606.390075] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1606.390226] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-334b39e5-e6e6-4dbf-86ca-772a4f39c543 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.408772] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1606.408772] env[63371]: value = "task-1774307" [ 1606.408772] env[63371]: _type = "Task" [ 1606.408772] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.419723] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774307, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.580768] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5266a1a0-39b1-d41f-3cb8-85749587a33b, 'name': SearchDatastore_Task, 'duration_secs': 0.00859} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.581630] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fcc424e-7922-405d-87dd-15817fffe769 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.587529] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Waiting for the task: (returnval){ [ 1606.587529] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521bf6b7-e9d7-8de4-5968-cf3b97ad262f" [ 1606.587529] env[63371]: _type = "Task" [ 1606.587529] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.595655] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521bf6b7-e9d7-8de4-5968-cf3b97ad262f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.919725] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774307, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.075597] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f7bca80-bc7e-4d6f-ab1b-72253b328724 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.088387] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2afd96a-7cc7-48a5-a22f-77ac9400d6ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.098995] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521bf6b7-e9d7-8de4-5968-cf3b97ad262f, 'name': SearchDatastore_Task, 'duration_secs': 0.008733} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.123250] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.123589] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6/44a392e4-32c1-4aaf-8dc0-7df50c1a28c6.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1607.124718] env[63371]: DEBUG nova.network.neutron [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Updated VIF entry in instance network info cache for port df0bcb02-c7dd-42fe-96f3-e45841fdf782. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1607.125106] env[63371]: DEBUG nova.network.neutron [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Updating instance_info_cache with network_info: [{"id": "df0bcb02-c7dd-42fe-96f3-e45841fdf782", "address": "fa:16:3e:bc:47:fd", "network": {"id": "1dc1e53b-b865-4642-b667-e771524c6438", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-562533890-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fd1634f21c45efa8606cf6c339a790", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0bcb02-c7", "ovs_interfaceid": "df0bcb02-c7dd-42fe-96f3-e45841fdf782", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.126393] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-69a8730a-fa99-4215-ae54-1379ce4821e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.129158] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b01fad-46c9-4e85-85d2-d573eb9abfbc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.140594] env[63371]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-061097c1-ab14-4033-833f-b9822a6a0dde {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.144519] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Waiting for the task: (returnval){ [ 1607.144519] env[63371]: value = "task-1774308" [ 1607.144519] env[63371]: _type = "Task" [ 1607.144519] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.156505] env[63371]: DEBUG nova.compute.provider_tree [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1607.162396] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774308, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.220049] env[63371]: DEBUG nova.network.neutron [-] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.419166] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774307, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.634725] env[63371]: DEBUG oslo_concurrency.lockutils [req-0f64ee25-6890-4b21-a849-cae56cc34664 req-d8bf6e6a-de21-4d06-9cb2-7ee47f54ba8b service nova] Releasing lock "refresh_cache-fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.654379] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774308, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.677744] env[63371]: ERROR nova.scheduler.client.report [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [req-e27dfc5f-bcdb-4e4e-a3f5-ed33707dd0eb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e27dfc5f-bcdb-4e4e-a3f5-ed33707dd0eb"}]} [ 1607.694580] env[63371]: DEBUG nova.scheduler.client.report [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1607.711901] env[63371]: DEBUG nova.scheduler.client.report [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1607.712165] env[63371]: DEBUG nova.compute.provider_tree [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1607.722974] env[63371]: INFO nova.compute.manager [-] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Took 1.38 seconds to deallocate network for instance. 
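The 409 "placement.concurrent_update" error above is Placement's generation-based optimistic concurrency check: the inventory update carried a stale resource-provider generation, so the report client refreshes the provider's inventories and retries with the new generation, as the subsequent entries show. A hedged sketch of that compare-and-swap loop against the Placement HTTP API follows; the endpoint, token, and retry count are illustrative assumptions, not taken from this deployment.

```python
# Illustrative compare-and-swap loop for Placement inventory updates.
# Endpoint, token and attempt count are assumptions for this sketch.
import requests

PLACEMENT = 'http://placement.example.org/placement'   # assumed endpoint
HEADERS = {'X-Auth-Token': 'TOKEN',                     # assumed token
           'OpenStack-API-Version': 'placement 1.26'}

def set_inventory(rp_uuid, inventories, attempts=3):
    for _ in range(attempts):
        # Re-read the provider to pick up its current generation.
        rp = requests.get(f'{PLACEMENT}/resource_providers/{rp_uuid}',
                          headers=HEADERS).json()
        payload = {'resource_provider_generation': rp['generation'],
                   'inventories': inventories}
        resp = requests.put(
            f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories',
            json=payload, headers=HEADERS)
        if resp.status_code != 409:        # 409 == generation conflict: retry
            resp.raise_for_status()
            return resp.json()
    raise RuntimeError('gave up after repeated generation conflicts')
```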
[ 1607.726291] env[63371]: DEBUG nova.scheduler.client.report [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1607.751716] env[63371]: DEBUG nova.scheduler.client.report [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1607.841344] env[63371]: DEBUG nova.compute.manager [req-44d4d3f0-7eaf-4a9b-b278-8dda7ebefce7 req-a95ebc5d-e988-42ae-9c4c-0ecaeb11011d service nova] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Received event network-vif-deleted-9a55b6f2-f084-4989-9b8c-434c1a1deab6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1607.921911] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774307, 'name': CreateVM_Task, 'duration_secs': 1.339558} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.922210] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1607.922659] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.922987] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.923253] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1607.926443] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbd72407-a366-4740-926f-2b4c5a1e6c9f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.933021] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1607.933021] env[63371]: value = 
"session[52854284-8312-6a88-0b15-8c5a2a120aab]5241c675-d7b6-f1ef-fc04-53af64d83f15" [ 1607.933021] env[63371]: _type = "Task" [ 1607.933021] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.941580] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5241c675-d7b6-f1ef-fc04-53af64d83f15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.158178] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774308, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.184028] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35be3dbc-2086-41e2-acfe-cbfd24849986 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.190529] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b22303-c1a0-4c63-9aff-4d8f110f721b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.221582] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7119a506-bfd0-4831-b345-1cc687e27469 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.228554] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc22f034-b3a5-4df7-8f76-cbc67ab1d794 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.232867] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.242627] env[63371]: DEBUG nova.compute.provider_tree [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1608.443015] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': 
session[52854284-8312-6a88-0b15-8c5a2a120aab]5241c675-d7b6-f1ef-fc04-53af64d83f15, 'name': SearchDatastore_Task, 'duration_secs': 0.059378} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.443454] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1608.443741] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1608.444016] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.444206] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.444424] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1608.444716] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3beaab59-f1e2-4608-8ded-6c034f459f8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.452419] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1608.452591] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1608.453280] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8009ce7-52ed-4950-9ac5-64bad1ce2f66 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.458119] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1608.458119] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]527b825d-4cb7-75f8-5b63-1a294dc8ab74" [ 1608.458119] env[63371]: _type = "Task" [ 1608.458119] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.465131] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527b825d-4cb7-75f8-5b63-1a294dc8ab74, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.658248] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774308, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.036432} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.658512] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6/44a392e4-32c1-4aaf-8dc0-7df50c1a28c6.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1608.658761] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1608.659063] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8e2d060-7779-42cd-a857-4378d6e487fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.665337] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Waiting for the task: (returnval){ [ 1608.665337] env[63371]: value = "task-1774309" [ 1608.665337] env[63371]: _type = "Task" [ 1608.665337] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.672965] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774309, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.775653] env[63371]: DEBUG nova.scheduler.client.report [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 99 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1608.776011] env[63371]: DEBUG nova.compute.provider_tree [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 99 to 100 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1608.776229] env[63371]: DEBUG nova.compute.provider_tree [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1608.968922] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527b825d-4cb7-75f8-5b63-1a294dc8ab74, 'name': SearchDatastore_Task, 'duration_secs': 0.008043} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.969785] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9c2403d-7a49-4633-9e42-4fb18f4d03ab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.974950] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1608.974950] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52354d0b-79e9-3e6c-87de-c478be5a81f0" [ 1608.974950] env[63371]: _type = "Task" [ 1608.974950] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.983721] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52354d0b-79e9-3e6c-87de-c478be5a81f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.176020] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774309, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068362} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.176020] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1609.176535] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d436b1d1-1720-4ce3-b476-9c9f197e89f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.205640] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6/44a392e4-32c1-4aaf-8dc0-7df50c1a28c6.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1609.205640] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95171533-edd8-41f8-bf41-0734d9fc345f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.225935] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Waiting for the task: (returnval){ [ 1609.225935] env[63371]: value = "task-1774310" [ 1609.225935] env[63371]: _type = "Task" [ 1609.225935] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.234968] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774310, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.282260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.597s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1609.282790] env[63371]: DEBUG nova.compute.manager [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1609.285559] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.964s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.286989] env[63371]: INFO nova.compute.claims [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1609.485604] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52354d0b-79e9-3e6c-87de-c478be5a81f0, 'name': SearchDatastore_Task, 'duration_secs': 0.010175} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.485883] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.486153] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e/fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1609.486407] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-152bf26d-b408-45a5-9bec-7a7571112af2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.492537] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1609.492537] env[63371]: value = "task-1774311" [ 1609.492537] env[63371]: _type = "Task" [ 1609.492537] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.500043] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774311, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.736315] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774310, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.794100] env[63371]: DEBUG nova.compute.utils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1609.796059] env[63371]: DEBUG nova.compute.manager [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1609.796059] env[63371]: DEBUG nova.network.neutron [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1609.855031] env[63371]: DEBUG nova.policy [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '14b201b8d738471295e655e2ee2cad8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e693d73d70140c2ba065de2b60838c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1610.003158] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774311, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463656} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.003489] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e/fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1610.003635] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1610.003915] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-541ec9f4-3b4d-4ebf-b94e-c5483c2b4469 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.010247] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1610.010247] env[63371]: value = "task-1774312" [ 1610.010247] env[63371]: _type = "Task" [ 1610.010247] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.018309] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774312, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.147727] env[63371]: DEBUG nova.network.neutron [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Successfully created port: d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1610.236042] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774310, 'name': ReconfigVM_Task, 'duration_secs': 0.576489} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.236326] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6/44a392e4-32c1-4aaf-8dc0-7df50c1a28c6.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1610.236933] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2d3375a8-2ed1-48d1-ac3f-2c835e6370e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.242841] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Waiting for the task: (returnval){ [ 1610.242841] env[63371]: value = "task-1774313" [ 1610.242841] env[63371]: _type = "Task" [ 1610.242841] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.250105] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774313, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.301546] env[63371]: DEBUG nova.compute.manager [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1610.522691] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774312, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064992} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.522691] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1610.523434] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7fc560-9a95-42e4-b6c1-4a4239010e4a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.546681] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e/fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1610.548957] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0f6a813-db52-4738-a2ce-f3dde0588334 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.568492] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1610.568492] env[63371]: value = "task-1774314" [ 1610.568492] env[63371]: _type = "Task" [ 1610.568492] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.577209] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774314, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.659825] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f94d09-e6fc-4688-ba4b-52774e59cabe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.667339] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51548521-210b-4837-a714-ae9aefc765cf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.697528] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d511ba-59db-4d99-a36d-1886e5dfc98b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.705493] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-878e0f85-7808-446c-b56b-4df42d16fbca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.720497] env[63371]: DEBUG nova.compute.provider_tree [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1610.752440] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774313, 'name': Rename_Task, 'duration_secs': 0.15274} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.752734] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1610.752976] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-887e99d4-04d6-4a54-959c-2f41ccdcfd6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.759579] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Waiting for the task: (returnval){ [ 1610.759579] env[63371]: value = "task-1774315" [ 1610.759579] env[63371]: _type = "Task" [ 1610.759579] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.773943] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774315, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.078853] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774314, 'name': ReconfigVM_Task, 'duration_secs': 0.275459} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.079159] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Reconfigured VM instance instance-00000046 to attach disk [datastore1] fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e/fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1611.079791] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e3402e14-613c-4ffb-86c1-5729fb954921 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.086591] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1611.086591] env[63371]: value = "task-1774316" [ 1611.086591] env[63371]: _type = "Task" [ 1611.086591] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.094328] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774316, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.224274] env[63371]: DEBUG nova.scheduler.client.report [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1611.269946] env[63371]: DEBUG oslo_vmware.api [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774315, 'name': PowerOnVM_Task, 'duration_secs': 0.447015} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.270244] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1611.270450] env[63371]: INFO nova.compute.manager [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Took 10.08 seconds to spawn the instance on the hypervisor. [ 1611.270627] env[63371]: DEBUG nova.compute.manager [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1611.271409] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e8c742-ced3-4c4d-b4d0-d2f87409d2c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.313713] env[63371]: DEBUG nova.compute.manager [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1611.339719] env[63371]: DEBUG nova.virt.hardware [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1611.339995] env[63371]: DEBUG nova.virt.hardware [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1611.340172] env[63371]: DEBUG nova.virt.hardware [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1611.340378] env[63371]: DEBUG nova.virt.hardware [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 
tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1611.340522] env[63371]: DEBUG nova.virt.hardware [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1611.340668] env[63371]: DEBUG nova.virt.hardware [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1611.340870] env[63371]: DEBUG nova.virt.hardware [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1611.341030] env[63371]: DEBUG nova.virt.hardware [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1611.341197] env[63371]: DEBUG nova.virt.hardware [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1611.341355] env[63371]: DEBUG nova.virt.hardware [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1611.341557] env[63371]: DEBUG nova.virt.hardware [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1611.342410] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f22355f-f12e-4ec0-9cef-ff56db03c6e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.349873] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b662d59b-9554-43a1-b43b-a06e8eeb84d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.596752] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774316, 'name': Rename_Task, 'duration_secs': 0.141398} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.598044] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1611.599171] env[63371]: DEBUG nova.compute.manager [req-3c581c10-1da9-47e5-87d3-81bb8edda5ba req-88beddec-b52a-43f6-bdd5-7dce4842151a service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Received event network-vif-plugged-d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1611.599506] env[63371]: DEBUG oslo_concurrency.lockutils [req-3c581c10-1da9-47e5-87d3-81bb8edda5ba req-88beddec-b52a-43f6-bdd5-7dce4842151a service nova] Acquiring lock "9985dbcd-4498-4629-aae5-5e1933307c50-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.599824] env[63371]: DEBUG oslo_concurrency.lockutils [req-3c581c10-1da9-47e5-87d3-81bb8edda5ba req-88beddec-b52a-43f6-bdd5-7dce4842151a service nova] Lock "9985dbcd-4498-4629-aae5-5e1933307c50-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.600131] env[63371]: DEBUG oslo_concurrency.lockutils [req-3c581c10-1da9-47e5-87d3-81bb8edda5ba req-88beddec-b52a-43f6-bdd5-7dce4842151a service nova] Lock "9985dbcd-4498-4629-aae5-5e1933307c50-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.600406] env[63371]: DEBUG nova.compute.manager [req-3c581c10-1da9-47e5-87d3-81bb8edda5ba req-88beddec-b52a-43f6-bdd5-7dce4842151a service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] No waiting events found dispatching network-vif-plugged-d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1611.600696] env[63371]: WARNING nova.compute.manager [req-3c581c10-1da9-47e5-87d3-81bb8edda5ba req-88beddec-b52a-43f6-bdd5-7dce4842151a service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Received unexpected event network-vif-plugged-d3f41a80-52de-46a5-ac15-9a26e6710908 for instance with vm_state building and task_state spawning. [ 1611.601075] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-091f64cd-2fbf-45f2-a63e-f9ccb7c1b881 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.613020] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1611.613020] env[63371]: value = "task-1774317" [ 1611.613020] env[63371]: _type = "Task" [ 1611.613020] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.618600] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774317, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.678790] env[63371]: DEBUG nova.network.neutron [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Successfully updated port: d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1611.729363] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.444s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.729913] env[63371]: DEBUG nova.compute.manager [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1611.732751] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.339s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.790033] env[63371]: INFO nova.compute.manager [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Took 45.43 seconds to build instance. 
[ 1612.061893] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-421e3440-ae7d-4d88-acd6-a8070c1ef337 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.069914] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466d574d-329c-4d37-90f9-77f8454de1b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.101668] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9e5640-2c87-43fa-889d-a87666ba9465 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.109059] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5492c56-901e-457b-be00-3ee9fdab9f51 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.125208] env[63371]: DEBUG nova.compute.provider_tree [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1612.129481] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774317, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.184031] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.184031] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.184031] env[63371]: DEBUG nova.network.neutron [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1612.237436] env[63371]: DEBUG nova.compute.utils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1612.238836] env[63371]: DEBUG nova.compute.manager [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1612.239016] env[63371]: DEBUG nova.network.neutron [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1612.291844] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83153fe1-47f2-4a61-9cee-6cc21c2a9a7b tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Lock "44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.947s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1612.317544] env[63371]: DEBUG nova.policy [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1b2f698ebd747d6a84ac3f3e05e97b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a5b81b233f640b186d9798ff57a4945', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1612.587423] env[63371]: DEBUG nova.network.neutron [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Successfully created port: 24bd2275-5bff-4a52-a3f0-63ef1b63b73b {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1612.623191] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774317, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.631280] env[63371]: DEBUG nova.scheduler.client.report [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1612.732912] env[63371]: DEBUG nova.network.neutron [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1612.743634] env[63371]: DEBUG nova.compute.manager [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1612.956209] env[63371]: DEBUG nova.network.neutron [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Updating instance_info_cache with network_info: [{"id": "d3f41a80-52de-46a5-ac15-9a26e6710908", "address": "fa:16:3e:f6:cd:6b", "network": {"id": "9c25e5e9-468d-4d4c-93e0-c9815eff1c2e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-814005109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e693d73d70140c2ba065de2b60838c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3f41a80-52", "ovs_interfaceid": "d3f41a80-52de-46a5-ac15-9a26e6710908", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.122963] env[63371]: DEBUG oslo_vmware.api [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774317, 'name': PowerOnVM_Task, 'duration_secs': 1.024485} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.123428] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1613.123428] env[63371]: INFO nova.compute.manager [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Took 9.46 seconds to spawn the instance on the hypervisor. 
[ 1613.123428] env[63371]: DEBUG nova.compute.manager [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1613.124889] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42ced9d-21d0-48eb-9dc5-d168ce6eda43 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.136495] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.404s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.136669] env[63371]: INFO nova.compute.manager [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Successfully reverted task state from image_uploading on failure for instance. [ 1613.138926] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.056s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1613.139338] env[63371]: DEBUG nova.objects.instance [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lazy-loading 'resources' on Instance uuid 704978f9-3b24-4a73-8f64-b8e3e9e94a04 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server [None req-9a8a4123-d8a0-4f94-913c-b6dac7245b7d tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Exception during message handling: oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.vm.Snapshot:snapshot-368375' has already been deleted or has not been completely created [ 1613.144019] env[63371]: Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-368375' has already been deleted or has not been completely created' [ 1613.144019] env[63371]: Faults: [ManagedObjectNotFound] [ 1613.144019] env[63371]: Details: {'obj': 'snapshot-368375'} [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server response = request(managed_object, **kwargs) [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__ [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server return client.invoke(args, kwargs) [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke [ 1613.144019] env[63371]: 
ERROR oslo_messaging.rpc.server result = self.send(soapenv, timeout=timeout) [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server return self.process_reply(reply.message, None, None) [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server raise WebFault(fault, replyroot) [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server suds.WebFault: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-368375' has already been deleted or has not been completely created' [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server return api_method(*args, **kwargs) [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server raise exceptions.VimFaultException(fault_list, fault_string, [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.VimFaultException: The object 'vim.vm.Snapshot:snapshot-368375' has already been deleted or has not been completely created [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-368375' has already been deleted or has not been completely created' [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server Faults: [ManagedObjectNotFound] [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server Details: {'obj': 'snapshot-368375'} [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 172, in _process_incoming [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in 
wrapped [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server raise self.value [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server raise self.value [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1613.144019] env[63371]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1453, in decorated_function [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server raise self.value [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 233, in decorated_function [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server raise self.value [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 230, in decorated_function [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server return function(self, context, image_id, instance, [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4443, in snapshot_instance [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server self._snapshot_instance(context, image_id, instance, [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 4476, in _snapshot_instance [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server self.driver.snapshot(context, instance, image_id, [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 571, in snapshot [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server self._vmops.snapshot(context, instance, image_id, update_task_state) [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 1033, in snapshot [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server self._delete_vm_snapshot(instance, vm_ref, snapshot_ref) [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/decorator.py", line 232, in fun [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server return caller(func, *(extras + args), **kw) [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 124, in retry_if_task_in_progress [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server f(*args, **kwargs) [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 933, in _delete_vm_snapshot [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server delete_snapshot_task = self._session._call_method( [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 127, in _call_method [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception() as ctxt: [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server raise self.value [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 125, in _call_method [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server return self.invoke_api(module, method, *args, **kwargs) [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server return _invoke_api(module, method, *args, **kwargs) [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server idle = self.f(*self.args, **self.kw) [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api [ 1613.145463] env[63371]: ERROR oslo_messaging.rpc.server raise clazz(str(excep), [ 1613.146801] env[63371]: ERROR oslo_messaging.rpc.server oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.vm.Snapshot:snapshot-368375' has already been deleted or has not been completely created [ 1613.146801] env[63371]: ERROR oslo_messaging.rpc.server Cause: Server raised fault: 'The object 'vim.vm.Snapshot:snapshot-368375' has already been deleted or has not been completely created' [ 1613.146801] env[63371]: ERROR oslo_messaging.rpc.server Faults: [ManagedObjectNotFound] [ 1613.146801] env[63371]: ERROR oslo_messaging.rpc.server Details: {'obj': 'snapshot-368375'} [ 1613.146801] env[63371]: ERROR oslo_messaging.rpc.server [ 1613.272743] env[63371]: DEBUG nova.compute.manager [req-6739e3a3-b3b3-4b0f-9b72-9377b7b81adf req-730fb573-b6f2-4415-ae20-2f73cffccaed service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Received event network-changed-b12875f1-bdc7-4980-9223-c0a2fee47b86 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1613.272942] env[63371]: DEBUG nova.compute.manager [req-6739e3a3-b3b3-4b0f-9b72-9377b7b81adf req-730fb573-b6f2-4415-ae20-2f73cffccaed service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Refreshing instance network info cache due to event network-changed-b12875f1-bdc7-4980-9223-c0a2fee47b86. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1613.273191] env[63371]: DEBUG oslo_concurrency.lockutils [req-6739e3a3-b3b3-4b0f-9b72-9377b7b81adf req-730fb573-b6f2-4415-ae20-2f73cffccaed service nova] Acquiring lock "refresh_cache-44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1613.273338] env[63371]: DEBUG oslo_concurrency.lockutils [req-6739e3a3-b3b3-4b0f-9b72-9377b7b81adf req-730fb573-b6f2-4415-ae20-2f73cffccaed service nova] Acquired lock "refresh_cache-44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.273497] env[63371]: DEBUG nova.network.neutron [req-6739e3a3-b3b3-4b0f-9b72-9377b7b81adf req-730fb573-b6f2-4415-ae20-2f73cffccaed service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Refreshing network info cache for port b12875f1-bdc7-4980-9223-c0a2fee47b86 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1613.460631] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Releasing lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.460895] env[63371]: DEBUG nova.compute.manager [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Instance network_info: |[{"id": "d3f41a80-52de-46a5-ac15-9a26e6710908", "address": "fa:16:3e:f6:cd:6b", "network": {"id": "9c25e5e9-468d-4d4c-93e0-c9815eff1c2e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-814005109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e693d73d70140c2ba065de2b60838c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3f41a80-52", "ovs_interfaceid": "d3f41a80-52de-46a5-ac15-9a26e6710908", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1613.461330] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:cd:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7514a465-f1a4-4a8b-b76b-726b1a9d7e2f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'd3f41a80-52de-46a5-ac15-9a26e6710908', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1613.469076] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Creating folder: Project (2e693d73d70140c2ba065de2b60838c2). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1613.469368] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31053bd5-d0a6-4f3b-99b2-6a1ef23ad0a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.479387] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Created folder: Project (2e693d73d70140c2ba065de2b60838c2) in parent group-v368199. [ 1613.479565] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Creating folder: Instances. Parent ref: group-v368398. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1613.479792] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6dceed1-4959-4fa9-adda-3dcff79db178 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.488732] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Created folder: Instances in parent group-v368398. [ 1613.488975] env[63371]: DEBUG oslo.service.loopingcall [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1613.489173] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1613.489362] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67d25c5e-5f88-4955-a5a8-35b30dea3d37 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.507637] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1613.507637] env[63371]: value = "task-1774320" [ 1613.507637] env[63371]: _type = "Task" [ 1613.507637] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.514873] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774320, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.622853] env[63371]: DEBUG nova.compute.manager [req-12557d34-1370-4768-ab25-92a4bc8bfe77 req-a5fc3e84-9f41-44a1-b06b-89f4118bb459 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Received event network-changed-d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1613.623160] env[63371]: DEBUG nova.compute.manager [req-12557d34-1370-4768-ab25-92a4bc8bfe77 req-a5fc3e84-9f41-44a1-b06b-89f4118bb459 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Refreshing instance network info cache due to event network-changed-d3f41a80-52de-46a5-ac15-9a26e6710908. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1613.623294] env[63371]: DEBUG oslo_concurrency.lockutils [req-12557d34-1370-4768-ab25-92a4bc8bfe77 req-a5fc3e84-9f41-44a1-b06b-89f4118bb459 service nova] Acquiring lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1613.623433] env[63371]: DEBUG oslo_concurrency.lockutils [req-12557d34-1370-4768-ab25-92a4bc8bfe77 req-a5fc3e84-9f41-44a1-b06b-89f4118bb459 service nova] Acquired lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1613.623656] env[63371]: DEBUG nova.network.neutron [req-12557d34-1370-4768-ab25-92a4bc8bfe77 req-a5fc3e84-9f41-44a1-b06b-89f4118bb459 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Refreshing network info cache for port d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1613.645359] env[63371]: INFO nova.compute.manager [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Took 45.46 seconds to build instance. [ 1613.755258] env[63371]: DEBUG nova.compute.manager [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1613.781936] env[63371]: DEBUG nova.virt.hardware [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1613.782109] env[63371]: DEBUG nova.virt.hardware [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1613.782263] env[63371]: DEBUG nova.virt.hardware [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1613.782485] env[63371]: DEBUG nova.virt.hardware [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1613.782620] env[63371]: DEBUG nova.virt.hardware [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1613.782754] env[63371]: DEBUG nova.virt.hardware [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1613.783018] env[63371]: DEBUG nova.virt.hardware [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1613.783199] env[63371]: DEBUG nova.virt.hardware [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1613.783371] env[63371]: DEBUG nova.virt.hardware [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 
tempest-ImagesTestJSON-1893767495-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1613.783562] env[63371]: DEBUG nova.virt.hardware [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1613.783747] env[63371]: DEBUG nova.virt.hardware [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1613.784769] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-748e9024-4577-44d0-a4f9-7e6e20573e87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.794395] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c536b448-faf3-4136-bacc-80f68715c5e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.020135] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774320, 'name': CreateVM_Task, 'duration_secs': 0.361968} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.020269] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1614.020953] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.021160] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.021512] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1614.021746] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c894c314-c372-4dc4-9a13-4ba8115b6bc7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.025774] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb901db-4db3-4d8c-acef-4789c623da45 {{(pid=63371) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.029447] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1614.029447] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5210cf73-a09c-c42f-1e8e-b2fb88a453c0" [ 1614.029447] env[63371]: _type = "Task" [ 1614.029447] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.035680] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1164e447-cbaa-4fb2-9af3-1761358520dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.042081] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5210cf73-a09c-c42f-1e8e-b2fb88a453c0, 'name': SearchDatastore_Task, 'duration_secs': 0.009695} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.042837] env[63371]: DEBUG nova.network.neutron [req-6739e3a3-b3b3-4b0f-9b72-9377b7b81adf req-730fb573-b6f2-4415-ae20-2f73cffccaed service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Updated VIF entry in instance network info cache for port b12875f1-bdc7-4980-9223-c0a2fee47b86. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1614.043174] env[63371]: DEBUG nova.network.neutron [req-6739e3a3-b3b3-4b0f-9b72-9377b7b81adf req-730fb573-b6f2-4415-ae20-2f73cffccaed service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Updating instance_info_cache with network_info: [{"id": "b12875f1-bdc7-4980-9223-c0a2fee47b86", "address": "fa:16:3e:e0:2e:e7", "network": {"id": "ec9bd35d-8a6f-4ed0-83c5-18c1ba123bd4", "bridge": "br-int", "label": "tempest-ServersTestJSON-1570224473-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ab55ffce414a461aa6f77a83100b7346", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a9abd00f-2cea-40f8-9804-a56b6431192d", "external-id": "nsx-vlan-transportzone-639", "segmentation_id": 639, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb12875f1-bd", "ovs_interfaceid": "b12875f1-bdc7-4980-9223-c0a2fee47b86", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1614.044649] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 
tempest-ServersNegativeTestJSON-1665297044-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.044905] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1614.045158] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.045308] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.045485] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1614.045952] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3174b1b-95dd-4a6c-a6cb-c9c12b1744b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.074613] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50a469f8-ee24-43c2-8cfa-d40cfb8d7807 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.080084] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1614.080276] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1614.082835] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99a307eb-55b4-436c-bf95-6a1a303cd098 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.086524] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a45e67-596e-4a3d-a0bd-790a1575d0e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.092841] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1614.092841] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5256561e-71dd-b61b-e177-f3737b0aca95" [ 1614.092841] env[63371]: _type = "Task" [ 1614.092841] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.101558] env[63371]: DEBUG nova.compute.provider_tree [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1614.111302] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5256561e-71dd-b61b-e177-f3737b0aca95, 'name': SearchDatastore_Task, 'duration_secs': 0.008276} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.111638] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa507b84-13fc-4aad-b50c-9a0c80644053 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.116456] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1614.116456] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525cb3b6-f047-16ba-6e7c-ef15433bf352" [ 1614.116456] env[63371]: _type = "Task" [ 1614.116456] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.125285] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525cb3b6-f047-16ba-6e7c-ef15433bf352, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.147382] env[63371]: DEBUG oslo_concurrency.lockutils [None req-35fdbfc2-e293-4dd4-b662-c772208f6e2b tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.577s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.541871] env[63371]: DEBUG nova.network.neutron [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Successfully updated port: 24bd2275-5bff-4a52-a3f0-63ef1b63b73b {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1614.549781] env[63371]: DEBUG oslo_concurrency.lockutils [req-6739e3a3-b3b3-4b0f-9b72-9377b7b81adf req-730fb573-b6f2-4415-ae20-2f73cffccaed service nova] Releasing lock "refresh_cache-44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.579266] env[63371]: DEBUG nova.network.neutron [req-12557d34-1370-4768-ab25-92a4bc8bfe77 req-a5fc3e84-9f41-44a1-b06b-89f4118bb459 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Updated VIF entry in instance network info cache for port d3f41a80-52de-46a5-ac15-9a26e6710908. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1614.579643] env[63371]: DEBUG nova.network.neutron [req-12557d34-1370-4768-ab25-92a4bc8bfe77 req-a5fc3e84-9f41-44a1-b06b-89f4118bb459 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Updating instance_info_cache with network_info: [{"id": "d3f41a80-52de-46a5-ac15-9a26e6710908", "address": "fa:16:3e:f6:cd:6b", "network": {"id": "9c25e5e9-468d-4d4c-93e0-c9815eff1c2e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-814005109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e693d73d70140c2ba065de2b60838c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3f41a80-52", "ovs_interfaceid": "d3f41a80-52de-46a5-ac15-9a26e6710908", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1614.607138] env[63371]: DEBUG nova.scheduler.client.report [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1614.630698] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525cb3b6-f047-16ba-6e7c-ef15433bf352, 'name': SearchDatastore_Task, 'duration_secs': 0.008634} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.630938] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.631196] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9985dbcd-4498-4629-aae5-5e1933307c50/9985dbcd-4498-4629-aae5-5e1933307c50.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1614.631445] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ef8ddfb-dd9f-4d8a-bc0d-bf32c644934e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.638215] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1614.638215] env[63371]: value = "task-1774321" [ 1614.638215] env[63371]: _type = "Task" [ 1614.638215] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.645402] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774321, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.046569] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "refresh_cache-e781866e-9b26-47c7-b1a6-d6d9547bf2fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1615.047623] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "refresh_cache-e781866e-9b26-47c7-b1a6-d6d9547bf2fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1615.047623] env[63371]: DEBUG nova.network.neutron [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1615.082075] env[63371]: DEBUG oslo_concurrency.lockutils [req-12557d34-1370-4768-ab25-92a4bc8bfe77 req-a5fc3e84-9f41-44a1-b06b-89f4118bb459 service nova] Releasing lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1615.111832] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.973s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.115025] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.813s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.115025] env[63371]: DEBUG nova.objects.instance [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Lazy-loading 'resources' on Instance uuid 1cb18f2a-6476-4492-8576-7b0fd693a107 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1615.140017] env[63371]: INFO nova.scheduler.client.report [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleted allocations for instance 704978f9-3b24-4a73-8f64-b8e3e9e94a04 [ 1615.152730] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774321, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.300942] env[63371]: DEBUG nova.compute.manager [req-00ffbf17-4c7b-4db1-a204-daeba9763a5e req-4336a983-44d6-4c71-83fb-b49f855565f2 service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Received event network-changed-df0bcb02-c7dd-42fe-96f3-e45841fdf782 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1615.301130] env[63371]: DEBUG nova.compute.manager [req-00ffbf17-4c7b-4db1-a204-daeba9763a5e req-4336a983-44d6-4c71-83fb-b49f855565f2 service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Refreshing instance network info cache due to event network-changed-df0bcb02-c7dd-42fe-96f3-e45841fdf782. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1615.301403] env[63371]: DEBUG oslo_concurrency.lockutils [req-00ffbf17-4c7b-4db1-a204-daeba9763a5e req-4336a983-44d6-4c71-83fb-b49f855565f2 service nova] Acquiring lock "refresh_cache-fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1615.301606] env[63371]: DEBUG oslo_concurrency.lockutils [req-00ffbf17-4c7b-4db1-a204-daeba9763a5e req-4336a983-44d6-4c71-83fb-b49f855565f2 service nova] Acquired lock "refresh_cache-fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1615.301773] env[63371]: DEBUG nova.network.neutron [req-00ffbf17-4c7b-4db1-a204-daeba9763a5e req-4336a983-44d6-4c71-83fb-b49f855565f2 service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Refreshing network info cache for port df0bcb02-c7dd-42fe-96f3-e45841fdf782 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1615.578824] env[63371]: DEBUG nova.network.neutron [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1615.654186] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774321, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.985418} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.654689] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30988ef8-3121-4abf-8735-ecf94587e183 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "704978f9-3b24-4a73-8f64-b8e3e9e94a04" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.036s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.657844] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9985dbcd-4498-4629-aae5-5e1933307c50/9985dbcd-4498-4629-aae5-5e1933307c50.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1615.658087] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1615.658714] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc489a5a-7f02-4053-ad68-696a05eb11da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.666019] env[63371]: DEBUG nova.compute.manager [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Received event network-vif-plugged-24bd2275-5bff-4a52-a3f0-63ef1b63b73b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1615.666019] env[63371]: DEBUG oslo_concurrency.lockutils [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] Acquiring lock "e781866e-9b26-47c7-b1a6-d6d9547bf2fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1615.666019] env[63371]: DEBUG oslo_concurrency.lockutils [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] Lock "e781866e-9b26-47c7-b1a6-d6d9547bf2fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1615.666019] env[63371]: DEBUG oslo_concurrency.lockutils [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] Lock "e781866e-9b26-47c7-b1a6-d6d9547bf2fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1615.666019] env[63371]: DEBUG nova.compute.manager [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] No waiting events found dispatching 
network-vif-plugged-24bd2275-5bff-4a52-a3f0-63ef1b63b73b {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1615.666019] env[63371]: WARNING nova.compute.manager [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Received unexpected event network-vif-plugged-24bd2275-5bff-4a52-a3f0-63ef1b63b73b for instance with vm_state building and task_state spawning. [ 1615.666019] env[63371]: DEBUG nova.compute.manager [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Received event network-changed-24bd2275-5bff-4a52-a3f0-63ef1b63b73b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1615.666019] env[63371]: DEBUG nova.compute.manager [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Refreshing instance network info cache due to event network-changed-24bd2275-5bff-4a52-a3f0-63ef1b63b73b. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1615.666019] env[63371]: DEBUG oslo_concurrency.lockutils [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] Acquiring lock "refresh_cache-e781866e-9b26-47c7-b1a6-d6d9547bf2fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1615.669174] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1615.669174] env[63371]: value = "task-1774322" [ 1615.669174] env[63371]: _type = "Task" [ 1615.669174] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.683693] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774322, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.758965] env[63371]: DEBUG nova.network.neutron [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Updating instance_info_cache with network_info: [{"id": "24bd2275-5bff-4a52-a3f0-63ef1b63b73b", "address": "fa:16:3e:47:21:ab", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24bd2275-5b", "ovs_interfaceid": "24bd2275-5bff-4a52-a3f0-63ef1b63b73b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1615.992571] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb486c1-5101-411e-8cfc-fe07bff558d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.000669] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1b6679-0504-45d3-b1ad-10a058fa6a56 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.041527] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b256cd73-6445-4f31-855f-536378807fd1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.049475] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e280fc8-3131-4764-bf7f-c8e5f826dda5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.063098] env[63371]: DEBUG nova.compute.provider_tree [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1616.111545] env[63371]: DEBUG nova.network.neutron 
[req-00ffbf17-4c7b-4db1-a204-daeba9763a5e req-4336a983-44d6-4c71-83fb-b49f855565f2 service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Updated VIF entry in instance network info cache for port df0bcb02-c7dd-42fe-96f3-e45841fdf782. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1616.111865] env[63371]: DEBUG nova.network.neutron [req-00ffbf17-4c7b-4db1-a204-daeba9763a5e req-4336a983-44d6-4c71-83fb-b49f855565f2 service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Updating instance_info_cache with network_info: [{"id": "df0bcb02-c7dd-42fe-96f3-e45841fdf782", "address": "fa:16:3e:bc:47:fd", "network": {"id": "1dc1e53b-b865-4642-b667-e771524c6438", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-562533890-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fd1634f21c45efa8606cf6c339a790", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0bcb02-c7", "ovs_interfaceid": "df0bcb02-c7dd-42fe-96f3-e45841fdf782", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1616.183031] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774322, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128406} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.183300] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1616.183992] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa29e4a-6994-490b-8bda-ba96b22d797a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.207740] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 9985dbcd-4498-4629-aae5-5e1933307c50/9985dbcd-4498-4629-aae5-5e1933307c50.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1616.207987] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-599b0564-eb7e-4e82-83d8-bc8d6924ff31 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.227398] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1616.227398] env[63371]: value = "task-1774323" [ 1616.227398] env[63371]: _type = "Task" [ 1616.227398] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.235555] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774323, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.267825] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "refresh_cache-e781866e-9b26-47c7-b1a6-d6d9547bf2fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.268225] env[63371]: DEBUG nova.compute.manager [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Instance network_info: |[{"id": "24bd2275-5bff-4a52-a3f0-63ef1b63b73b", "address": "fa:16:3e:47:21:ab", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24bd2275-5b", "ovs_interfaceid": "24bd2275-5bff-4a52-a3f0-63ef1b63b73b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1616.268603] env[63371]: DEBUG oslo_concurrency.lockutils [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] Acquired lock "refresh_cache-e781866e-9b26-47c7-b1a6-d6d9547bf2fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.268812] env[63371]: DEBUG nova.network.neutron [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Refreshing network info cache for port 24bd2275-5bff-4a52-a3f0-63ef1b63b73b {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1616.270120] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:21:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24bd2275-5bff-4a52-a3f0-63ef1b63b73b', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1616.277689] env[63371]: DEBUG oslo.service.loopingcall [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1616.278199] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1616.279344] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4aa33da4-703b-496c-830c-6892c726cf4d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.299089] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1616.299089] env[63371]: value = "task-1774324" [ 1616.299089] env[63371]: _type = "Task" [ 1616.299089] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.306939] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774324, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.603765] env[63371]: DEBUG nova.scheduler.client.report [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 100 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1616.604076] env[63371]: DEBUG nova.compute.provider_tree [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 100 to 101 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1616.604302] env[63371]: DEBUG nova.compute.provider_tree [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1616.614156] env[63371]: DEBUG oslo_concurrency.lockutils [req-00ffbf17-4c7b-4db1-a204-daeba9763a5e req-4336a983-44d6-4c71-83fb-b49f855565f2 service nova] Releasing lock "refresh_cache-fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.738623] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 
tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.809345] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774324, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.110220] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.995s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.112328] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.521s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1617.113911] env[63371]: INFO nova.compute.claims [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1617.136120] env[63371]: INFO nova.scheduler.client.report [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Deleted allocations for instance 1cb18f2a-6476-4492-8576-7b0fd693a107 [ 1617.241451] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774323, 'name': ReconfigVM_Task, 'duration_secs': 0.654197} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.241451] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 9985dbcd-4498-4629-aae5-5e1933307c50/9985dbcd-4498-4629-aae5-5e1933307c50.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1617.241451] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aa49d389-25b5-4139-9106-f5a529a913af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.248183] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1617.248183] env[63371]: value = "task-1774325" [ 1617.248183] env[63371]: _type = "Task" [ 1617.248183] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.260813] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774325, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.261638] env[63371]: DEBUG nova.network.neutron [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Updated VIF entry in instance network info cache for port 24bd2275-5bff-4a52-a3f0-63ef1b63b73b. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1617.261975] env[63371]: DEBUG nova.network.neutron [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Updating instance_info_cache with network_info: [{"id": "24bd2275-5bff-4a52-a3f0-63ef1b63b73b", "address": "fa:16:3e:47:21:ab", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24bd2275-5b", "ovs_interfaceid": "24bd2275-5bff-4a52-a3f0-63ef1b63b73b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1617.309781] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774324, 'name': CreateVM_Task, 'duration_secs': 0.577324} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.310077] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1617.310742] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.310905] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.311285] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1617.311615] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-519abef5-7c1c-41b9-b2ca-d87f35230146 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.316373] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1617.316373] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]529a7628-3c5f-c629-8990-d18cef2350d1" [ 1617.316373] env[63371]: _type = "Task" [ 1617.316373] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.326428] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529a7628-3c5f-c629-8990-d18cef2350d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.643950] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d5e0d58c-7ddb-49cc-b0a5-46e8efeea583 tempest-ServerMetadataTestJSON-1172086561 tempest-ServerMetadataTestJSON-1172086561-project-member] Lock "1cb18f2a-6476-4492-8576-7b0fd693a107" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.435s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1617.760692] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774325, 'name': Rename_Task, 'duration_secs': 0.17987} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.761067] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1617.761258] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b83a6ed-9e4c-4bef-b20a-19b03b4651c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.766011] env[63371]: DEBUG oslo_concurrency.lockutils [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] Releasing lock "refresh_cache-e781866e-9b26-47c7-b1a6-d6d9547bf2fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.766011] env[63371]: DEBUG nova.compute.manager [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Received event network-changed-df0bcb02-c7dd-42fe-96f3-e45841fdf782 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1617.766011] env[63371]: DEBUG nova.compute.manager [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Refreshing instance network info cache due to event network-changed-df0bcb02-c7dd-42fe-96f3-e45841fdf782. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1617.766011] env[63371]: DEBUG oslo_concurrency.lockutils [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] Acquiring lock "refresh_cache-fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.766011] env[63371]: DEBUG oslo_concurrency.lockutils [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] Acquired lock "refresh_cache-fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.766011] env[63371]: DEBUG nova.network.neutron [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Refreshing network info cache for port df0bcb02-c7dd-42fe-96f3-e45841fdf782 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1617.769015] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1617.769015] env[63371]: value = "task-1774326" [ 1617.769015] env[63371]: _type = "Task" [ 1617.769015] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.777272] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774326, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.828335] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529a7628-3c5f-c629-8990-d18cef2350d1, 'name': SearchDatastore_Task, 'duration_secs': 0.013906} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.828335] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.828335] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1617.828335] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.828335] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.828335] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1617.828335] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a81a37b-323c-4868-94f3-3c76b0069fd9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.836537] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1617.836718] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None 
req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1617.837474] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c1290bd-6ed3-4c3e-96ca-b01ab4a3e295 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.842943] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1617.842943] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52460987-bdf8-dced-1882-bbad6acd7bab" [ 1617.842943] env[63371]: _type = "Task" [ 1617.842943] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.850244] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52460987-bdf8-dced-1882-bbad6acd7bab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.965070] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.965313] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1618.282395] env[63371]: DEBUG oslo_vmware.api [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774326, 'name': PowerOnVM_Task, 'duration_secs': 0.4759} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.285754] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1618.285976] env[63371]: INFO nova.compute.manager [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Took 6.97 seconds to spawn the instance on the hypervisor. 
[ 1618.286167] env[63371]: DEBUG nova.compute.manager [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1618.287160] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a594376-af65-460e-ab44-42ea9f9b5a6f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.362327] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52460987-bdf8-dced-1882-bbad6acd7bab, 'name': SearchDatastore_Task, 'duration_secs': 0.01397} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.363613] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ba68c7b-6aab-46a0-936c-4847240970a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.374968] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1618.374968] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522ffef0-4ab5-7b7e-6ba4-99feb1335eec" [ 1618.374968] env[63371]: _type = "Task" [ 1618.374968] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.386084] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522ffef0-4ab5-7b7e-6ba4-99feb1335eec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.467536] env[63371]: DEBUG nova.compute.manager [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1618.574453] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286bdbb3-7e43-4828-a6c5-16dae678d486 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.582784] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84221ee3-9226-4995-9fb7-a060d104ba60 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.615933] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22d8ce7-5e15-409e-9294-d4342083dd61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.623617] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cae5fb-2490-4ccb-91c4-5c1c755243ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.636370] env[63371]: DEBUG nova.compute.provider_tree [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1618.672914] env[63371]: DEBUG nova.network.neutron [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Updated VIF entry in instance network info cache for port df0bcb02-c7dd-42fe-96f3-e45841fdf782. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1618.673734] env[63371]: DEBUG nova.network.neutron [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Updating instance_info_cache with network_info: [{"id": "df0bcb02-c7dd-42fe-96f3-e45841fdf782", "address": "fa:16:3e:bc:47:fd", "network": {"id": "1dc1e53b-b865-4642-b667-e771524c6438", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-562533890-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fd1634f21c45efa8606cf6c339a790", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf0bcb02-c7", "ovs_interfaceid": "df0bcb02-c7dd-42fe-96f3-e45841fdf782", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.812484] env[63371]: INFO nova.compute.manager [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Took 41.88 seconds to build instance. [ 1618.886878] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522ffef0-4ab5-7b7e-6ba4-99feb1335eec, 'name': SearchDatastore_Task, 'duration_secs': 0.010079} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.887225] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1618.887486] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e781866e-9b26-47c7-b1a6-d6d9547bf2fd/e781866e-9b26-47c7-b1a6-d6d9547bf2fd.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1618.887775] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7f5a34c-8dcb-4be4-b96e-f79d085abbf4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.894135] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1618.894135] env[63371]: value = "task-1774327" [ 1618.894135] env[63371]: _type = "Task" [ 1618.894135] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.902880] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774327, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.998630] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.158648] env[63371]: ERROR nova.scheduler.client.report [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [req-b55bd7ab-3a5b-4060-9140-59efd0f2360c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b55bd7ab-3a5b-4060-9140-59efd0f2360c"}]} [ 1619.175833] env[63371]: DEBUG nova.scheduler.client.report [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1619.177706] env[63371]: DEBUG oslo_concurrency.lockutils [req-59fbcb5b-17b3-4556-bd66-f258ce44ccff req-6f571c23-e8a1-4f34-b56d-c65e41b23aa4 service nova] Releasing lock "refresh_cache-fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.194405] env[63371]: DEBUG nova.scheduler.client.report [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1619.194643] env[63371]: DEBUG nova.compute.provider_tree [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1619.207214] env[63371]: DEBUG nova.scheduler.client.report [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1619.228677] env[63371]: DEBUG nova.scheduler.client.report [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1619.314463] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87ee554e-034c-4f20-a0ce-af297bf272e6 tempest-ServersNegativeTestJSON-1665297044 
tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "9985dbcd-4498-4629-aae5-5e1933307c50" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.392s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.410696] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774327, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.644322] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8e9a42-f279-42eb-ada9-ccb9b7f6040d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.652390] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c99620-4cbe-42f3-ba3c-825dc6d8400a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.684842] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea7c87a7-c9c8-4df2-b63b-26e6095b6976 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.692584] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-186daca3-4e06-4768-b488-f07729e7c8f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.708751] env[63371]: DEBUG nova.compute.provider_tree [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1619.908165] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774327, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.5582} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.908165] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e781866e-9b26-47c7-b1a6-d6d9547bf2fd/e781866e-9b26-47c7-b1a6-d6d9547bf2fd.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1619.908165] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1619.908165] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b6988a69-0149-4b49-a52f-7f6fbda6e951 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.913524] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1619.913524] env[63371]: value = "task-1774328" [ 1619.913524] env[63371]: _type = "Task" [ 1619.913524] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.921218] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774328, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.211849] env[63371]: DEBUG nova.scheduler.client.report [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1620.424386] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774328, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.265939} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.425321] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1620.426158] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b3d79f-8989-4b06-a8ad-c05511d23936 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.449217] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] e781866e-9b26-47c7-b1a6-d6d9547bf2fd/e781866e-9b26-47c7-b1a6-d6d9547bf2fd.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1620.449529] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41952f4e-7638-4619-b71f-9413be7050da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.469201] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1620.469201] env[63371]: value = "task-1774329" [ 1620.469201] env[63371]: _type = "Task" [ 1620.469201] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.476783] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774329, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.509549] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "3065fc71-f127-43b7-83b7-70140f29965b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1620.509780] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "3065fc71-f127-43b7-83b7-70140f29965b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.717311] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.605s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1620.718943] env[63371]: DEBUG nova.compute.manager [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1620.721371] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.361s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1620.725261] env[63371]: INFO nova.compute.claims [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1620.979686] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774329, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.016650] env[63371]: DEBUG nova.compute.manager [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1621.228560] env[63371]: DEBUG nova.compute.utils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1621.233494] env[63371]: DEBUG nova.compute.manager [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1621.233915] env[63371]: DEBUG nova.network.neutron [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1621.283903] env[63371]: DEBUG nova.policy [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c6416719728485f8dd45eea9e39fdc5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58f967d3770541269fb89f48b3df58c9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1621.482931] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774329, 'name': ReconfigVM_Task, 'duration_secs': 0.545063} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.483558] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Reconfigured VM instance instance-00000048 to attach disk [datastore1] e781866e-9b26-47c7-b1a6-d6d9547bf2fd/e781866e-9b26-47c7-b1a6-d6d9547bf2fd.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1621.484613] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d1912519-d6fb-48e1-aa20-5129d1627ab3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.491861] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1621.491861] env[63371]: value = "task-1774330" [ 1621.491861] env[63371]: _type = "Task" [ 1621.491861] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.507894] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774330, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.543957] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.566719] env[63371]: DEBUG nova.network.neutron [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Successfully created port: 2a9f10ee-aa45-47a6-81cc-6a16a7e15445 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1621.740380] env[63371]: DEBUG nova.compute.manager [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1622.002092] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774330, 'name': Rename_Task, 'duration_secs': 0.33843} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1622.004637] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1622.005071] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd402509-12e0-4c85-a5ae-0b036dd05594 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.011145] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1622.011145] env[63371]: value = "task-1774331" [ 1622.011145] env[63371]: _type = "Task" [ 1622.011145] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1622.019175] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774331, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.199099] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130ade74-92ce-4495-8bd5-f23ecac563db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.205864] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16030772-34a6-47c8-9261-36119ac766f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.238518] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36332c50-93a0-4213-8d4e-6f9871196f1f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.246265] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed1dfa5-951c-4f3f-af77-0ee14d7a435c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.263503] env[63371]: DEBUG nova.compute.provider_tree [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1622.522320] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774331, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.755286] env[63371]: DEBUG nova.compute.manager [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1622.767113] env[63371]: DEBUG nova.scheduler.client.report [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1622.783511] env[63371]: DEBUG nova.virt.hardware [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1622.784720] env[63371]: DEBUG nova.virt.hardware [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1622.784720] env[63371]: DEBUG nova.virt.hardware [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1622.784720] env[63371]: DEBUG nova.virt.hardware [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1622.784720] env[63371]: DEBUG nova.virt.hardware [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1622.784720] env[63371]: DEBUG nova.virt.hardware [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1622.785011] env[63371]: DEBUG nova.virt.hardware [None req-2ef329a6-da00-404f-9737-0d8a5d740248 
tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1622.785011] env[63371]: DEBUG nova.virt.hardware [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1622.785158] env[63371]: DEBUG nova.virt.hardware [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1622.785347] env[63371]: DEBUG nova.virt.hardware [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1622.785547] env[63371]: DEBUG nova.virt.hardware [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1622.786546] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af479613-040f-4028-9327-1561d1d68f22 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.796269] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04beb9de-683a-4736-aef6-add7b48bb658 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.025635] env[63371]: DEBUG oslo_vmware.api [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774331, 'name': PowerOnVM_Task, 'duration_secs': 0.911968} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.025907] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1623.026124] env[63371]: INFO nova.compute.manager [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Took 9.27 seconds to spawn the instance on the hypervisor. 
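The nova.virt.hardware lines above ("Build topologies for 1 vcpu(s) 1:1:1", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") enumerate every (sockets, cores, threads) split whose product equals the flavor's vCPU count, bounded by the flavor/image maxima (65536 each here). A simplified sketch of that enumeration, not Nova's actual implementation; for the 1-vCPU m1.nano flavor it yields only 1:1:1, matching the log.

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Every (sockets, cores, threads) combination whose product equals the
    # requested vCPU count and respects the per-dimension maxima.
    topologies = []
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            topologies.append((sockets, cores, threads))
    return topologies

print(possible_topologies(1))  # -> [(1, 1, 1)], the topology chosen above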
[ 1623.026297] env[63371]: DEBUG nova.compute.manager [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1623.027136] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f68fb31c-67b8-44d3-847a-cc4ae4262527 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.075258] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Acquiring lock "36d5c00a-4762-4801-aff1-0a22e336730a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.075500] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Lock "36d5c00a-4762-4801-aff1-0a22e336730a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.101039] env[63371]: DEBUG nova.compute.manager [req-e1804fb2-33ca-4e1b-a21b-79d69e976f51 req-b14f5485-945f-41b7-ad5a-4cb37e54eba2 service nova] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Received event network-vif-plugged-2a9f10ee-aa45-47a6-81cc-6a16a7e15445 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1623.101271] env[63371]: DEBUG oslo_concurrency.lockutils [req-e1804fb2-33ca-4e1b-a21b-79d69e976f51 req-b14f5485-945f-41b7-ad5a-4cb37e54eba2 service nova] Acquiring lock "3027832f-12cd-4255-b699-bcbb254a6c5a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1623.101477] env[63371]: DEBUG oslo_concurrency.lockutils [req-e1804fb2-33ca-4e1b-a21b-79d69e976f51 req-b14f5485-945f-41b7-ad5a-4cb37e54eba2 service nova] Lock "3027832f-12cd-4255-b699-bcbb254a6c5a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.101678] env[63371]: DEBUG oslo_concurrency.lockutils [req-e1804fb2-33ca-4e1b-a21b-79d69e976f51 req-b14f5485-945f-41b7-ad5a-4cb37e54eba2 service nova] Lock "3027832f-12cd-4255-b699-bcbb254a6c5a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.101872] env[63371]: DEBUG nova.compute.manager [req-e1804fb2-33ca-4e1b-a21b-79d69e976f51 req-b14f5485-945f-41b7-ad5a-4cb37e54eba2 service nova] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] No waiting events found dispatching network-vif-plugged-2a9f10ee-aa45-47a6-81cc-6a16a7e15445 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1623.102024] env[63371]: 
WARNING nova.compute.manager [req-e1804fb2-33ca-4e1b-a21b-79d69e976f51 req-b14f5485-945f-41b7-ad5a-4cb37e54eba2 service nova] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Received unexpected event network-vif-plugged-2a9f10ee-aa45-47a6-81cc-6a16a7e15445 for instance with vm_state building and task_state spawning. [ 1623.206387] env[63371]: DEBUG nova.network.neutron [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Successfully updated port: 2a9f10ee-aa45-47a6-81cc-6a16a7e15445 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1623.272192] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.551s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1623.272717] env[63371]: DEBUG nova.compute.manager [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1623.275167] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.555s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1623.275761] env[63371]: DEBUG nova.objects.instance [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Lazy-loading 'resources' on Instance uuid 1276e001-fb07-4367-8b03-81c5fe5fbd0d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1623.546808] env[63371]: INFO nova.compute.manager [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Took 44.24 seconds to build instance. [ 1623.577992] env[63371]: DEBUG nova.compute.manager [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1623.710758] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "refresh_cache-3027832f-12cd-4255-b699-bcbb254a6c5a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.710953] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "refresh_cache-3027832f-12cd-4255-b699-bcbb254a6c5a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.711115] env[63371]: DEBUG nova.network.neutron [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1623.779394] env[63371]: DEBUG nova.compute.utils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1623.786026] env[63371]: DEBUG nova.compute.manager [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1623.786026] env[63371]: DEBUG nova.network.neutron [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1623.846660] env[63371]: DEBUG nova.policy [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c56ea345388e4739ae655edfa839c305', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c99d37d52edb40f99efb471da50f5845', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1624.049704] env[63371]: DEBUG oslo_concurrency.lockutils [None req-675acec1-5564-4f06-99e7-6a36057f93ad tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "e781866e-9b26-47c7-b1a6-d6d9547bf2fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.758s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.098318] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1624.161771] env[63371]: DEBUG nova.network.neutron [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Successfully created port: 91e0886d-8e37-4f74-9933-1f1b49ae72e4 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1624.254131] env[63371]: DEBUG nova.network.neutron [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1624.284717] env[63371]: DEBUG nova.compute.manager [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1624.318167] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0de9f09-4a33-478d-b86c-9627051e8cd5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.325760] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba8c2b3-7ac1-48a0-9b5a-841798951a24 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.365021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83bd010d-5ed3-416a-b079-bd3410f7d446 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.370796] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2042eeb-97cb-4624-85b1-6bf9f9ff3adb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.384967] env[63371]: DEBUG nova.compute.provider_tree [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1624.495727] env[63371]: DEBUG nova.network.neutron [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Updating instance_info_cache with network_info: [{"id": "2a9f10ee-aa45-47a6-81cc-6a16a7e15445", "address": "fa:16:3e:e5:cf:dc", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9f10ee-aa", "ovs_interfaceid": "2a9f10ee-aa45-47a6-81cc-6a16a7e15445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.865445] env[63371]: DEBUG nova.compute.manager [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1624.866420] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-ef3bf26f-519c-4e0a-833e-f81b12550b07 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.888352] env[63371]: DEBUG nova.scheduler.client.report [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1624.998263] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "refresh_cache-3027832f-12cd-4255-b699-bcbb254a6c5a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.998600] env[63371]: DEBUG nova.compute.manager [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Instance network_info: |[{"id": "2a9f10ee-aa45-47a6-81cc-6a16a7e15445", "address": "fa:16:3e:e5:cf:dc", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9f10ee-aa", "ovs_interfaceid": "2a9f10ee-aa45-47a6-81cc-6a16a7e15445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1624.999034] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:cf:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2a9f10ee-aa45-47a6-81cc-6a16a7e15445', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1625.006689] env[63371]: DEBUG oslo.service.loopingcall [None 
req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1625.006904] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1625.007138] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b254c28f-37a1-401e-85b4-b57b06223389 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.027802] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1625.027802] env[63371]: value = "task-1774332" [ 1625.027802] env[63371]: _type = "Task" [ 1625.027802] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.127279] env[63371]: DEBUG nova.compute.manager [req-083fbf0f-c95e-4f48-87b3-f9e0c527eb81 req-9e02baee-a64d-498f-a717-01d0ecda1dc8 service nova] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Received event network-changed-2a9f10ee-aa45-47a6-81cc-6a16a7e15445 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1625.127468] env[63371]: DEBUG nova.compute.manager [req-083fbf0f-c95e-4f48-87b3-f9e0c527eb81 req-9e02baee-a64d-498f-a717-01d0ecda1dc8 service nova] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Refreshing instance network info cache due to event network-changed-2a9f10ee-aa45-47a6-81cc-6a16a7e15445. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1625.127679] env[63371]: DEBUG oslo_concurrency.lockutils [req-083fbf0f-c95e-4f48-87b3-f9e0c527eb81 req-9e02baee-a64d-498f-a717-01d0ecda1dc8 service nova] Acquiring lock "refresh_cache-3027832f-12cd-4255-b699-bcbb254a6c5a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.127818] env[63371]: DEBUG oslo_concurrency.lockutils [req-083fbf0f-c95e-4f48-87b3-f9e0c527eb81 req-9e02baee-a64d-498f-a717-01d0ecda1dc8 service nova] Acquired lock "refresh_cache-3027832f-12cd-4255-b699-bcbb254a6c5a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.127977] env[63371]: DEBUG nova.network.neutron [req-083fbf0f-c95e-4f48-87b3-f9e0c527eb81 req-9e02baee-a64d-498f-a717-01d0ecda1dc8 service nova] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Refreshing network info cache for port 2a9f10ee-aa45-47a6-81cc-6a16a7e15445 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1625.298223] env[63371]: DEBUG nova.compute.manager [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1625.321904] env[63371]: DEBUG nova.virt.hardware [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1625.322254] env[63371]: DEBUG nova.virt.hardware [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1625.322544] env[63371]: DEBUG nova.virt.hardware [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1625.322894] env[63371]: DEBUG nova.virt.hardware [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1625.323128] env[63371]: DEBUG nova.virt.hardware [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1625.323354] env[63371]: DEBUG nova.virt.hardware [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1625.323638] env[63371]: DEBUG nova.virt.hardware [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1625.323874] env[63371]: DEBUG nova.virt.hardware [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1625.324196] 
env[63371]: DEBUG nova.virt.hardware [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1625.324428] env[63371]: DEBUG nova.virt.hardware [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1625.324679] env[63371]: DEBUG nova.virt.hardware [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1625.325747] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7250fac8-3ac0-43f5-91dc-d52637d55a12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.335130] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df840b73-d3b4-45b0-be24-ec008ea0e4ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.377847] env[63371]: INFO nova.compute.manager [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] instance snapshotting [ 1625.381046] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1135bf0-91d1-40cd-8052-5b535677f9ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.400919] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.126s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.403704] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.604s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.405234] env[63371]: INFO nova.compute.claims [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1625.408581] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3384980-68a2-4462-9972-275812a9a55b {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.424062] env[63371]: INFO nova.scheduler.client.report [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Deleted allocations for instance 1276e001-fb07-4367-8b03-81c5fe5fbd0d [ 1625.538572] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774332, 'name': CreateVM_Task, 'duration_secs': 0.380799} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.538753] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1625.539632] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.539745] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.540129] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1625.540425] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97398640-a4df-4368-878b-2bcb2d8a1898 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.545773] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1625.545773] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fc5fb9-dac4-236f-9ae2-84be3e1a7339" [ 1625.545773] env[63371]: _type = "Task" [ 1625.545773] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.554729] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fc5fb9-dac4-236f-9ae2-84be3e1a7339, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.921311] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1625.921781] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-631b100f-a4d3-49c0-b121-b57f6fda9bdb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.932252] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ea7eef2f-f383-411a-99b4-7a5c96850c10 tempest-ServerAddressesNegativeTestJSON-1196450020 tempest-ServerAddressesNegativeTestJSON-1196450020-project-member] Lock "1276e001-fb07-4367-8b03-81c5fe5fbd0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.707s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.937995] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1625.937995] env[63371]: value = "task-1774333" [ 1625.937995] env[63371]: _type = "Task" [ 1625.937995] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.953821] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774333, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.057346] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fc5fb9-dac4-236f-9ae2-84be3e1a7339, 'name': SearchDatastore_Task, 'duration_secs': 0.009139} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.057707] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.059183] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1626.059183] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.059183] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.059183] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1626.059183] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b006018-d2b9-4841-9369-6447ef08c4be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.068970] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1626.069365] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1626.070066] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e54e54fb-f2ac-485c-b592-54b05ba20cba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.075933] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1626.075933] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ccbd38-a42f-0ea5-0b88-72b4ab1a9da4" [ 1626.075933] env[63371]: _type = "Task" [ 1626.075933] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.085513] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ccbd38-a42f-0ea5-0b88-72b4ab1a9da4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.182337] env[63371]: DEBUG nova.network.neutron [req-083fbf0f-c95e-4f48-87b3-f9e0c527eb81 req-9e02baee-a64d-498f-a717-01d0ecda1dc8 service nova] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Updated VIF entry in instance network info cache for port 2a9f10ee-aa45-47a6-81cc-6a16a7e15445. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1626.182337] env[63371]: DEBUG nova.network.neutron [req-083fbf0f-c95e-4f48-87b3-f9e0c527eb81 req-9e02baee-a64d-498f-a717-01d0ecda1dc8 service nova] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Updating instance_info_cache with network_info: [{"id": "2a9f10ee-aa45-47a6-81cc-6a16a7e15445", "address": "fa:16:3e:e5:cf:dc", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2a9f10ee-aa", "ovs_interfaceid": "2a9f10ee-aa45-47a6-81cc-6a16a7e15445", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.185428] env[63371]: DEBUG nova.network.neutron [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Successfully updated port: 91e0886d-8e37-4f74-9933-1f1b49ae72e4 {{(pid=63371) 
_update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1626.448898] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774333, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.589794] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ccbd38-a42f-0ea5-0b88-72b4ab1a9da4, 'name': SearchDatastore_Task, 'duration_secs': 0.011665} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.590558] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5376cb8-097f-4e24-9607-bdd0e4a6103b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.595569] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1626.595569] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520407f1-cf0d-d443-d2ef-58a738ec9afc" [ 1626.595569] env[63371]: _type = "Task" [ 1626.595569] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.603832] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520407f1-cf0d-d443-d2ef-58a738ec9afc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.689366] env[63371]: DEBUG oslo_concurrency.lockutils [req-083fbf0f-c95e-4f48-87b3-f9e0c527eb81 req-9e02baee-a64d-498f-a717-01d0ecda1dc8 service nova] Releasing lock "refresh_cache-3027832f-12cd-4255-b699-bcbb254a6c5a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.690009] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "refresh_cache-0cd2018f-7a54-4458-b5fd-353ab75ffbfd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.690139] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquired lock "refresh_cache-0cd2018f-7a54-4458-b5fd-353ab75ffbfd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.690286] env[63371]: DEBUG nova.network.neutron [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1626.848889] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7536ffd0-a6c6-465b-9fca-0b8f9538c814 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.857357] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9292a42a-d815-4262-a2a6-7b8655863ecd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.888154] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c87bf48-23d3-4c08-bae0-c029c188e5b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.896040] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901df2d9-75a5-417c-a0ca-b9af2816c09a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.909014] env[63371]: DEBUG nova.compute.provider_tree [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1626.950613] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774333, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.107019] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520407f1-cf0d-d443-d2ef-58a738ec9afc, 'name': SearchDatastore_Task, 'duration_secs': 0.010799} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.107019] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.107019] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3027832f-12cd-4255-b699-bcbb254a6c5a/3027832f-12cd-4255-b699-bcbb254a6c5a.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1627.107019] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c1a84148-0f02-4d70-b926-9f50bfd7e22c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.115363] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1627.115363] env[63371]: value = "task-1774334" [ 1627.115363] env[63371]: _type = "Task" [ 1627.115363] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.122788] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774334, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.151761] env[63371]: DEBUG nova.compute.manager [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Received event network-vif-plugged-91e0886d-8e37-4f74-9933-1f1b49ae72e4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1627.151977] env[63371]: DEBUG oslo_concurrency.lockutils [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] Acquiring lock "0cd2018f-7a54-4458-b5fd-353ab75ffbfd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.152191] env[63371]: DEBUG oslo_concurrency.lockutils [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] Lock "0cd2018f-7a54-4458-b5fd-353ab75ffbfd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.152351] env[63371]: DEBUG oslo_concurrency.lockutils [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] Lock "0cd2018f-7a54-4458-b5fd-353ab75ffbfd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.152513] env[63371]: DEBUG nova.compute.manager [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] No waiting events found dispatching network-vif-plugged-91e0886d-8e37-4f74-9933-1f1b49ae72e4 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1627.152688] env[63371]: WARNING nova.compute.manager [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Received unexpected event network-vif-plugged-91e0886d-8e37-4f74-9933-1f1b49ae72e4 for instance with vm_state building and task_state spawning. [ 1627.152869] env[63371]: DEBUG nova.compute.manager [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Received event network-changed-91e0886d-8e37-4f74-9933-1f1b49ae72e4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1627.153050] env[63371]: DEBUG nova.compute.manager [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Refreshing instance network info cache due to event network-changed-91e0886d-8e37-4f74-9933-1f1b49ae72e4. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1627.153209] env[63371]: DEBUG oslo_concurrency.lockutils [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] Acquiring lock "refresh_cache-0cd2018f-7a54-4458-b5fd-353ab75ffbfd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.231210] env[63371]: DEBUG nova.network.neutron [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1627.411699] env[63371]: DEBUG nova.scheduler.client.report [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1627.417280] env[63371]: DEBUG nova.network.neutron [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Updating instance_info_cache with network_info: [{"id": "91e0886d-8e37-4f74-9933-1f1b49ae72e4", "address": "fa:16:3e:34:fa:31", "network": {"id": "6b2f7559-22c6-4657-b126-18f7ace337d5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1011247410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c99d37d52edb40f99efb471da50f5845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91e0886d-8e", "ovs_interfaceid": "91e0886d-8e37-4f74-9933-1f1b49ae72e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.452926] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774333, 'name': CreateSnapshot_Task, 'duration_secs': 1.164977} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.453235] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1627.454105] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d890918-5f36-434a-a8e8-18d0e672249b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.624990] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774334, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.919327] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Releasing lock "refresh_cache-0cd2018f-7a54-4458-b5fd-353ab75ffbfd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.919669] env[63371]: DEBUG nova.compute.manager [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Instance network_info: |[{"id": "91e0886d-8e37-4f74-9933-1f1b49ae72e4", "address": "fa:16:3e:34:fa:31", "network": {"id": "6b2f7559-22c6-4657-b126-18f7ace337d5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1011247410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c99d37d52edb40f99efb471da50f5845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91e0886d-8e", "ovs_interfaceid": "91e0886d-8e37-4f74-9933-1f1b49ae72e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1627.920396] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.517s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1627.920883] env[63371]: DEBUG nova.compute.manager [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c 
tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1627.923448] env[63371]: DEBUG oslo_concurrency.lockutils [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] Acquired lock "refresh_cache-0cd2018f-7a54-4458-b5fd-353ab75ffbfd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.924100] env[63371]: DEBUG nova.network.neutron [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Refreshing network info cache for port 91e0886d-8e37-4f74-9933-1f1b49ae72e4 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1627.924658] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:fa:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '447ff42d-b33e-4b5d-8b7f-e8117ebbbc92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91e0886d-8e37-4f74-9933-1f1b49ae72e4', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1627.932196] env[63371]: DEBUG oslo.service.loopingcall [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1627.932387] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 26.201s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1627.934569] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1627.935486] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0daf5676-5edd-43e3-b5f4-a2955017317f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.956994] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1627.956994] env[63371]: value = "task-1774335" [ 1627.956994] env[63371]: _type = "Task" [ 1627.956994] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.965373] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774335, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.974377] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1627.974933] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-19d78b1b-9141-4ad9-81a6-6dbb72caafce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.984456] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1627.984456] env[63371]: value = "task-1774336" [ 1627.984456] env[63371]: _type = "Task" [ 1627.984456] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.992998] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774336, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.124100] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774334, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568852} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.124402] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3027832f-12cd-4255-b699-bcbb254a6c5a/3027832f-12cd-4255-b699-bcbb254a6c5a.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1628.124615] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1628.124868] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc850882-de46-4cd1-b3da-8a36beffd454 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.131142] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1628.131142] env[63371]: value = "task-1774337" [ 1628.131142] env[63371]: _type = "Task" [ 1628.131142] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.140495] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774337, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.438673] env[63371]: DEBUG nova.compute.utils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1628.450773] env[63371]: DEBUG nova.compute.manager [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1628.450956] env[63371]: DEBUG nova.network.neutron [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1628.470238] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774335, 'name': CreateVM_Task, 'duration_secs': 0.425695} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.470238] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1628.470771] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.470933] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.471280] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1628.471543] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4fe03dd-d9a6-4792-9ff2-dfbf1291fc07 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.477420] env[63371]: DEBUG oslo_vmware.api [None 
req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1628.477420] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ed1fa4-3764-d1c8-fc1d-24ca82cb9c1f" [ 1628.477420] env[63371]: _type = "Task" [ 1628.477420] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.486345] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ed1fa4-3764-d1c8-fc1d-24ca82cb9c1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.494437] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774336, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.541811] env[63371]: DEBUG nova.policy [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25f28e53648c41d1a147c1aa04f0a708', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fb0da840f6847f19f03a1db8a1c3f4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1628.642216] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774337, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074199} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.644244] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1628.645053] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d295af8-30fe-46ec-9ae4-2d67239e9fd1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.675637] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 3027832f-12cd-4255-b699-bcbb254a6c5a/3027832f-12cd-4255-b699-bcbb254a6c5a.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1628.675988] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e281fade-210c-468e-a112-0a8330cffed0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.698686] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1628.698686] env[63371]: value = "task-1774338" [ 1628.698686] env[63371]: _type = "Task" [ 1628.698686] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.709702] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774338, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.844652] env[63371]: DEBUG nova.network.neutron [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Updated VIF entry in instance network info cache for port 91e0886d-8e37-4f74-9933-1f1b49ae72e4. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1628.845059] env[63371]: DEBUG nova.network.neutron [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Updating instance_info_cache with network_info: [{"id": "91e0886d-8e37-4f74-9933-1f1b49ae72e4", "address": "fa:16:3e:34:fa:31", "network": {"id": "6b2f7559-22c6-4657-b126-18f7ace337d5", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1011247410-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c99d37d52edb40f99efb471da50f5845", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91e0886d-8e", "ovs_interfaceid": "91e0886d-8e37-4f74-9933-1f1b49ae72e4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.953866] env[63371]: DEBUG nova.compute.manager [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1628.957566] env[63371]: DEBUG nova.network.neutron [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Successfully created port: c42db3e1-640d-4925-b5a3-adb5ddbd8177 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1628.987912] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ed1fa4-3764-d1c8-fc1d-24ca82cb9c1f, 'name': SearchDatastore_Task, 'duration_secs': 0.010573} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.988892] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e4608e3c-7083-42fa-b88c-8ee007ef7f60 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.989071] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e912c210-3ae1-47ce-b9cd-afebf6195606 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.989155] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.989269] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 44cc8606-24f5-4f6b-b96f-3559c9c3f06e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.989383] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e8bd5802-d2ff-4348-92d4-c23277f4eaeb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.989521] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e6cd62ce-f6d2-4e5b-acbc-7527a94e0932 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1628.989642] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance b523486c-adae-4322-80be-1f3bf33ca192 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.989749] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.989885] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 6df9af10-0053-4696-920a-10ab2af67ef5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.989968] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 195de525-1081-4db6-acf3-04a6d3eb142f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.990086] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.990206] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1628.990345] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 9862b0f0-ccf6-4e69-9e78-cf864adaa65e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.990419] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance d00602b9-16bf-4c11-bc47-6076dddbf159 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.990525] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance c03e2dc4-75d9-4fbb-afc8-046cbbf908ac actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.990661] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 0c9156ea-81c4-4286-a20b-66068a5bce59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.990797] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
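
The resource-tracker entries immediately above and below this point come from the periodic _remove_deleted_instances_allocations pass (nova/compute/resource_tracker.py, per the log trailers). Each placement allocation held against this compute node is resolved to one of three outcomes that the messages spell out: the allocation is kept for an instance actively managed on this host, a warning is logged and healing is skipped when the instance is not managed here but an allocation still references the host, or healing is skipped for an instance that has been scheduled to the host but has yet to start. The snippet below is a minimal, self-contained Python sketch of that decision pattern only; AllocationRecord, classify_allocation, and the sets passed in are hypothetical illustrations, not Nova's actual data structures or code, and the UUIDs and resource dict are copied from the surrounding entries.

# Illustrative sketch only: hypothetical names, not nova.compute.resource_tracker itself.
# It reproduces the three per-allocation outcomes visible in the surrounding log entries.
from dataclasses import dataclass
from enum import Enum


class Outcome(Enum):
    KEEP = "actively managed on this compute host; allocation kept"
    SKIP_UNKNOWN = ("not actively managed here but allocation references this host; "
                    "skipping heal because we do not know what to do")
    SKIP_PENDING = ("scheduled to this host, allocation exists, instance has yet to start; "
                    "skipping heal")


@dataclass
class AllocationRecord:
    # Hypothetical container for one placement allocation against this compute node.
    instance_uuid: str
    resources: dict  # e.g. {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}


def classify_allocation(record: AllocationRecord,
                        tracked_uuids: set[str],
                        scheduled_not_started: set[str]) -> Outcome:
    # Mirror the three messages this periodic task emits per allocation.
    if record.instance_uuid in tracked_uuids:
        return Outcome.KEEP
    if record.instance_uuid in scheduled_not_started:
        return Outcome.SKIP_PENDING
    return Outcome.SKIP_UNKNOWN


if __name__ == "__main__":
    # UUIDs and resources taken from the entries around this point in the log.
    tracked = {"e4608e3c-7083-42fa-b88c-8ee007ef7f60",
               "0cd2018f-7a54-4458-b5fd-353ab75ffbfd"}
    pending = {"e1bc4623-f6b5-4440-a58d-594e9cbe3628"}
    for uuid in ("0cd2018f-7a54-4458-b5fd-353ab75ffbfd",   # kept (actively managed)
                 "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932",   # warned and skipped
                 "e1bc4623-f6b5-4440-a58d-594e9cbe3628"):  # scheduled, not yet started
        rec = AllocationRecord(uuid, {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1})
        print(uuid[:8], "->", classify_allocation(rec, tracked, pending).name)
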
[ 1628.991331] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 3bd1c148-a48d-402c-bd76-2cb1d38b49f7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1628.991331] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.991331] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.991331] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 9985dbcd-4498-4629-aae5-5e1933307c50 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.991331] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e781866e-9b26-47c7-b1a6-d6d9547bf2fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.991647] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 3027832f-12cd-4255-b699-bcbb254a6c5a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.991647] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 0cd2018f-7a54-4458-b5fd-353ab75ffbfd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.991647] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 40644960-1400-4dc6-9f2b-78afb7492a8d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1628.996031] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.996269] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1628.996495] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.996642] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.996818] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1628.997805] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96633e60-2923-46b2-bc82-5e4d180a5c84 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.005818] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774336, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.007123] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1629.007297] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1629.007981] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca7d474e-5af3-4a5e-9d52-34428447a10e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.012995] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1629.012995] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52e636a6-5318-b692-09a7-2e7a7937cbf6" [ 1629.012995] env[63371]: _type = "Task" [ 1629.012995] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.020630] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e636a6-5318-b692-09a7-2e7a7937cbf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.208989] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774338, 'name': ReconfigVM_Task, 'duration_secs': 0.300153} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.209345] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 3027832f-12cd-4255-b699-bcbb254a6c5a/3027832f-12cd-4255-b699-bcbb254a6c5a.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1629.210074] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64edd224-9348-4fbc-87fe-023d57a65338 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.216265] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1629.216265] env[63371]: value = "task-1774339" [ 1629.216265] env[63371]: _type = "Task" [ 1629.216265] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.225891] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774339, 'name': Rename_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.348453] env[63371]: DEBUG oslo_concurrency.lockutils [req-9797b7e6-4b1a-4d26-8231-1a77230d4d88 req-172ffa62-10c2-43a6-bc8d-2378c27f271e service nova] Releasing lock "refresh_cache-0cd2018f-7a54-4458-b5fd-353ab75ffbfd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1629.496234] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774336, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.498996] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e1bc4623-f6b5-4440-a58d-594e9cbe3628 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1629.523449] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e636a6-5318-b692-09a7-2e7a7937cbf6, 'name': SearchDatastore_Task, 'duration_secs': 0.009287} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.524233] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5b0a6760-1106-403d-a19f-4f5d1c90be83 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.529426] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1629.529426] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52e06a65-874c-1516-da7c-4eb11d936c8c" [ 1629.529426] env[63371]: _type = "Task" [ 1629.529426] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.536735] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e06a65-874c-1516-da7c-4eb11d936c8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.726027] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774339, 'name': Rename_Task, 'duration_secs': 0.143701} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.726027] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1629.726759] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ace6364-9ef2-41cd-9d46-ac1d5468f753 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.734990] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1629.734990] env[63371]: value = "task-1774340" [ 1629.734990] env[63371]: _type = "Task" [ 1629.734990] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.743931] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774340, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.971018] env[63371]: DEBUG nova.compute.manager [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1629.993346] env[63371]: DEBUG nova.virt.hardware [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1629.993633] env[63371]: DEBUG nova.virt.hardware [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1629.993723] env[63371]: DEBUG nova.virt.hardware [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1629.993920] env[63371]: DEBUG nova.virt.hardware [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1629.994086] env[63371]: DEBUG nova.virt.hardware [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1629.994237] env[63371]: DEBUG nova.virt.hardware [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1629.994444] env[63371]: DEBUG nova.virt.hardware [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1629.994600] env[63371]: DEBUG nova.virt.hardware [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1629.994773] env[63371]: DEBUG nova.virt.hardware [None 
req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1629.994938] env[63371]: DEBUG nova.virt.hardware [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1629.995125] env[63371]: DEBUG nova.virt.hardware [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1629.995889] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f977d6d7-7083-41ee-8efd-c8293de0be53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.002639] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 3c6294ae-9a16-4f1e-abd4-1aec224625ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1630.008393] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef297c4-17a5-4ab5-be1d-eaaad424668b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.012073] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774336, 'name': CloneVM_Task, 'duration_secs': 1.876398} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.012306] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Created linked-clone VM from snapshot [ 1630.013244] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe83c23-ef52-4f59-98f2-4b3aff035dc3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.028916] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Uploading image 1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1630.039018] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e06a65-874c-1516-da7c-4eb11d936c8c, 'name': SearchDatastore_Task, 'duration_secs': 0.009732} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.039437] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.039565] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0cd2018f-7a54-4458-b5fd-353ab75ffbfd/0cd2018f-7a54-4458-b5fd-353ab75ffbfd.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1630.039815] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05adcead-b0cd-4d26-ba89-5b1a91c542ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.045676] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1630.045676] env[63371]: value = "task-1774341" [ 1630.045676] env[63371]: _type = "Task" [ 1630.045676] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.050061] env[63371]: DEBUG oslo_vmware.rw_handles [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1630.050061] env[63371]: value = "vm-368405" [ 1630.050061] env[63371]: _type = "VirtualMachine" [ 1630.050061] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1630.050286] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4e6e21d9-1dcf-4a5e-a057-e713f04044fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.055764] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774341, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.056837] env[63371]: DEBUG oslo_vmware.rw_handles [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lease: (returnval){ [ 1630.056837] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52023787-c69e-7549-72fb-e656cc51b0a6" [ 1630.056837] env[63371]: _type = "HttpNfcLease" [ 1630.056837] env[63371]: } obtained for exporting VM: (result){ [ 1630.056837] env[63371]: value = "vm-368405" [ 1630.056837] env[63371]: _type = "VirtualMachine" [ 1630.056837] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1630.057114] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the lease: (returnval){ [ 1630.057114] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52023787-c69e-7549-72fb-e656cc51b0a6" [ 1630.057114] env[63371]: _type = "HttpNfcLease" [ 1630.057114] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1630.062999] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1630.062999] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52023787-c69e-7549-72fb-e656cc51b0a6" [ 1630.062999] env[63371]: _type = "HttpNfcLease" [ 1630.062999] env[63371]: } is initializing. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1630.246109] env[63371]: DEBUG oslo_vmware.api [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774340, 'name': PowerOnVM_Task, 'duration_secs': 0.474684} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.246421] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1630.246636] env[63371]: INFO nova.compute.manager [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Took 7.49 seconds to spawn the instance on the hypervisor. [ 1630.246821] env[63371]: DEBUG nova.compute.manager [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1630.247583] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c31e41f-d889-45f3-996a-57293ed823ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.395925] env[63371]: DEBUG nova.compute.manager [req-e7a37fc2-30fd-43ae-8aae-e80179f9a16c req-03050de7-2d63-439f-935f-a89b737dc31c service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Received event network-vif-plugged-c42db3e1-640d-4925-b5a3-adb5ddbd8177 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1630.396179] env[63371]: DEBUG oslo_concurrency.lockutils [req-e7a37fc2-30fd-43ae-8aae-e80179f9a16c req-03050de7-2d63-439f-935f-a89b737dc31c service nova] Acquiring lock "40644960-1400-4dc6-9f2b-78afb7492a8d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.396400] env[63371]: DEBUG oslo_concurrency.lockutils [req-e7a37fc2-30fd-43ae-8aae-e80179f9a16c req-03050de7-2d63-439f-935f-a89b737dc31c service nova] Lock "40644960-1400-4dc6-9f2b-78afb7492a8d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.396571] env[63371]: DEBUG oslo_concurrency.lockutils [req-e7a37fc2-30fd-43ae-8aae-e80179f9a16c req-03050de7-2d63-439f-935f-a89b737dc31c service nova] Lock "40644960-1400-4dc6-9f2b-78afb7492a8d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.396737] env[63371]: DEBUG nova.compute.manager [req-e7a37fc2-30fd-43ae-8aae-e80179f9a16c req-03050de7-2d63-439f-935f-a89b737dc31c service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] No waiting events found dispatching network-vif-plugged-c42db3e1-640d-4925-b5a3-adb5ddbd8177 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1630.396898] env[63371]: WARNING nova.compute.manager [req-e7a37fc2-30fd-43ae-8aae-e80179f9a16c req-03050de7-2d63-439f-935f-a89b737dc31c service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Received unexpected event 
network-vif-plugged-c42db3e1-640d-4925-b5a3-adb5ddbd8177 for instance with vm_state building and task_state spawning. [ 1630.481402] env[63371]: DEBUG nova.network.neutron [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Successfully updated port: c42db3e1-640d-4925-b5a3-adb5ddbd8177 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1630.505899] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 3065fc71-f127-43b7-83b7-70140f29965b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1630.555619] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774341, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.565117] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1630.565117] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52023787-c69e-7549-72fb-e656cc51b0a6" [ 1630.565117] env[63371]: _type = "HttpNfcLease" [ 1630.565117] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1630.565437] env[63371]: DEBUG oslo_vmware.rw_handles [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1630.565437] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52023787-c69e-7549-72fb-e656cc51b0a6" [ 1630.565437] env[63371]: _type = "HttpNfcLease" [ 1630.565437] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1630.566241] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0df0bfc-bffc-43f6-90f8-18d56e1f25a2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.574783] env[63371]: DEBUG oslo_vmware.rw_handles [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52623404-7621-c678-0e77-687aca8dd4ef/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1630.574972] env[63371]: DEBUG oslo_vmware.rw_handles [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52623404-7621-c678-0e77-687aca8dd4ef/disk-0.vmdk for reading. 
{{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1630.697036] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d59aa059-b30e-44bc-b73c-44132cd0e017 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.765549] env[63371]: INFO nova.compute.manager [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Took 44.19 seconds to build instance. [ 1630.984272] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "refresh_cache-40644960-1400-4dc6-9f2b-78afb7492a8d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.984620] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "refresh_cache-40644960-1400-4dc6-9f2b-78afb7492a8d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.984620] env[63371]: DEBUG nova.network.neutron [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1631.009601] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 36d5c00a-4762-4801-aff1-0a22e336730a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1631.009601] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 21 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1631.009776] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4608MB phys_disk=200GB used_disk=21GB total_vcpus=48 used_vcpus=21 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1631.057028] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774341, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.628635} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.060435] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0cd2018f-7a54-4458-b5fd-353ab75ffbfd/0cd2018f-7a54-4458-b5fd-353ab75ffbfd.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1631.060713] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1631.061514] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-72787686-8664-4169-b52f-ff3684698c89 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.068482] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1631.068482] env[63371]: value = "task-1774343" [ 1631.068482] env[63371]: _type = "Task" [ 1631.068482] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.079723] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774343, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.268115] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ef329a6-da00-404f-9737-0d8a5d740248 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "3027832f-12cd-4255-b699-bcbb254a6c5a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.707s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.410329] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e11c9c2-1e62-4b5d-ba36-0adcdba1aa2b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.421966] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb76ceba-de96-4032-b979-5ca45c5697ab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.455182] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a67c42ab-7099-4b1b-b716-cc330e886f2b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.463733] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf19498-92c2-4584-8bc7-8d0127594c98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.478482] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1631.537147] env[63371]: DEBUG nova.network.neutron [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1631.579444] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774343, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069014} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.579959] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1631.581166] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fff4349-ae42-44c7-ad12-efa503b5f225 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.605411] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 0cd2018f-7a54-4458-b5fd-353ab75ffbfd/0cd2018f-7a54-4458-b5fd-353ab75ffbfd.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1631.608344] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27fa0a0c-50c0-4fc6-8132-7b9569057481 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.630606] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1631.630606] env[63371]: value = "task-1774344" [ 1631.630606] env[63371]: _type = "Task" [ 1631.630606] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.639399] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "3027832f-12cd-4255-b699-bcbb254a6c5a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.639660] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "3027832f-12cd-4255-b699-bcbb254a6c5a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.639941] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "3027832f-12cd-4255-b699-bcbb254a6c5a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.640183] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "3027832f-12cd-4255-b699-bcbb254a6c5a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1631.640409] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "3027832f-12cd-4255-b699-bcbb254a6c5a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1631.642499] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774344, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.643425] env[63371]: INFO nova.compute.manager [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Terminating instance [ 1631.645398] env[63371]: DEBUG nova.compute.manager [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1631.645632] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1631.646395] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3165edc6-b86b-4cf8-ae70-c80e8668c5ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.653494] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1631.656243] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30afb51e-7caa-4930-834f-a6e1bb5364a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.666144] env[63371]: DEBUG oslo_vmware.api [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1631.666144] env[63371]: value = "task-1774345" [ 1631.666144] env[63371]: _type = "Task" [ 1631.666144] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.675901] env[63371]: DEBUG oslo_vmware.api [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774345, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.768380] env[63371]: DEBUG nova.network.neutron [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Updating instance_info_cache with network_info: [{"id": "c42db3e1-640d-4925-b5a3-adb5ddbd8177", "address": "fa:16:3e:65:14:e0", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc42db3e1-64", "ovs_interfaceid": "c42db3e1-640d-4925-b5a3-adb5ddbd8177", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.983959] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1632.140643] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774344, 'name': ReconfigVM_Task, 'duration_secs': 0.318322} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.140941] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 0cd2018f-7a54-4458-b5fd-353ab75ffbfd/0cd2018f-7a54-4458-b5fd-353ab75ffbfd.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1632.141584] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf79fd9a-7d8d-49bd-8c90-9e899523f148 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.148445] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1632.148445] env[63371]: value = "task-1774346" [ 1632.148445] env[63371]: _type = "Task" [ 1632.148445] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.156671] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774346, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.176193] env[63371]: DEBUG oslo_vmware.api [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774345, 'name': PowerOffVM_Task, 'duration_secs': 0.281098} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.176512] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1632.176687] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1632.177060] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b80fed3-8a39-4afd-8d8b-7e5f23ae77b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.272566] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "refresh_cache-40644960-1400-4dc6-9f2b-78afb7492a8d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.272566] env[63371]: DEBUG nova.compute.manager [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Instance network_info: |[{"id": "c42db3e1-640d-4925-b5a3-adb5ddbd8177", "address": "fa:16:3e:65:14:e0", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc42db3e1-64", "ovs_interfaceid": "c42db3e1-640d-4925-b5a3-adb5ddbd8177", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1632.273135] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:65:14:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c42db3e1-640d-4925-b5a3-adb5ddbd8177', 'vif_model': 'vmxnet3'}] 
{{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1632.282461] env[63371]: DEBUG oslo.service.loopingcall [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1632.284025] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1632.284392] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1632.284613] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1632.284842] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleting the datastore file [datastore1] 3027832f-12cd-4255-b699-bcbb254a6c5a {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1632.285109] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6ba2aab-59dc-4491-9fde-f0e28a3bb7be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.299830] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c88fc068-9f88-4e56-aa59-11e20adf3bec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.311381] env[63371]: DEBUG oslo_vmware.api [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1632.311381] env[63371]: value = "task-1774348" [ 1632.311381] env[63371]: _type = "Task" [ 1632.311381] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.313530] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1632.313530] env[63371]: value = "task-1774349" [ 1632.313530] env[63371]: _type = "Task" [ 1632.313530] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.325463] env[63371]: DEBUG oslo_vmware.api [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774348, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.329010] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774349, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.424533] env[63371]: DEBUG nova.compute.manager [req-cb4c5de8-3089-4eaa-b58f-40f59d2dee17 req-0f7d5558-31b7-48ac-b11b-6a9995fb0331 service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Received event network-changed-c42db3e1-640d-4925-b5a3-adb5ddbd8177 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1632.424815] env[63371]: DEBUG nova.compute.manager [req-cb4c5de8-3089-4eaa-b58f-40f59d2dee17 req-0f7d5558-31b7-48ac-b11b-6a9995fb0331 service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Refreshing instance network info cache due to event network-changed-c42db3e1-640d-4925-b5a3-adb5ddbd8177. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1632.424942] env[63371]: DEBUG oslo_concurrency.lockutils [req-cb4c5de8-3089-4eaa-b58f-40f59d2dee17 req-0f7d5558-31b7-48ac-b11b-6a9995fb0331 service nova] Acquiring lock "refresh_cache-40644960-1400-4dc6-9f2b-78afb7492a8d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.425153] env[63371]: DEBUG oslo_concurrency.lockutils [req-cb4c5de8-3089-4eaa-b58f-40f59d2dee17 req-0f7d5558-31b7-48ac-b11b-6a9995fb0331 service nova] Acquired lock "refresh_cache-40644960-1400-4dc6-9f2b-78afb7492a8d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.425240] env[63371]: DEBUG nova.network.neutron [req-cb4c5de8-3089-4eaa-b58f-40f59d2dee17 req-0f7d5558-31b7-48ac-b11b-6a9995fb0331 service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Refreshing network info cache for port c42db3e1-640d-4925-b5a3-adb5ddbd8177 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1632.490952] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1632.491203] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.559s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1632.491500] env[63371]: DEBUG oslo_concurrency.lockutils [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 30.733s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1632.491731] env[63371]: DEBUG nova.objects.instance [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63371) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1632.660945] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 
tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774346, 'name': Rename_Task, 'duration_secs': 0.176377} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.661180] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1632.661485] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2ab3af3-498a-408d-85ac-9bd3f0ba1237 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.668890] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1632.668890] env[63371]: value = "task-1774350" [ 1632.668890] env[63371]: _type = "Task" [ 1632.668890] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.684817] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774350, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.825388] env[63371]: DEBUG oslo_vmware.api [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774348, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.269176} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.828541] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1632.828735] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1632.828912] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1632.829096] env[63371]: INFO nova.compute.manager [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Took 1.18 seconds to destroy the instance on the hypervisor. 
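The Rename_Task, PowerOnVM_Task and DeleteDatastoreFile_Task entries above all follow the same wait-and-poll pattern: the driver invokes a vCenter task, logs "Waiting for the task", then polls it and reports progress until it completes. The sketch below is a minimal, self-contained illustration of that pattern only, not the oslo.vmware implementation (which drives the poll through a looping call, visible here as the _poll_task frames); fetch_task_info is a hypothetical stand-in for the query that would read real task state from the server.

import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    # Simplified view of a vCenter task: the states mirror what the log lines
    # above report as "progress is N%" and "completed successfully".
    task_id: str
    state: str          # "queued" | "running" | "success" | "error"
    progress: int = 0
    error: str | None = None


def fetch_task_info(task_id, _tick=[0]):
    # Simulated task source so the example runs standalone; a real driver
    # would query the server for the task's current state instead.
    _tick[0] += 1
    if _tick[0] < 3:
        return TaskInfo(task_id, "running", progress=33 * _tick[0])
    return TaskInfo(task_id, "success", progress=100)


def wait_for_task(task_id, poll_interval=0.5):
    # Poll until the task reaches a terminal state, logging like the entries above.
    print(f"Waiting for the task: {task_id} to complete.")
    while True:
        info = fetch_task_info(task_id)
        if info.state in ("queued", "running"):
            print(f"Task: {task_id} progress is {info.progress}%.")
            time.sleep(poll_interval)
            continue
        if info.state == "success":
            print(f"Task: {task_id} completed successfully.")
            return info
        raise RuntimeError(f"Task {task_id} failed: {info.error}")


if __name__ == "__main__":
    wait_for_task("task-1774346")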
[ 1632.829340] env[63371]: DEBUG oslo.service.loopingcall [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1632.829508] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774349, 'name': CreateVM_Task, 'duration_secs': 0.473151} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.829715] env[63371]: DEBUG nova.compute.manager [-] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1632.829812] env[63371]: DEBUG nova.network.neutron [-] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1632.831330] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1632.831935] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.832103] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.832403] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1632.832969] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e64afd68-739f-4f12-8cbc-46b0186557a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.837748] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1632.837748] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52e0d02e-d4bb-6736-fc2f-ef2b7c157265" [ 1632.837748] env[63371]: _type = "Task" [ 1632.837748] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.847939] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e0d02e-d4bb-6736-fc2f-ef2b7c157265, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.180314] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774350, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.259360] env[63371]: DEBUG nova.network.neutron [req-cb4c5de8-3089-4eaa-b58f-40f59d2dee17 req-0f7d5558-31b7-48ac-b11b-6a9995fb0331 service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Updated VIF entry in instance network info cache for port c42db3e1-640d-4925-b5a3-adb5ddbd8177. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1633.259360] env[63371]: DEBUG nova.network.neutron [req-cb4c5de8-3089-4eaa-b58f-40f59d2dee17 req-0f7d5558-31b7-48ac-b11b-6a9995fb0331 service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Updating instance_info_cache with network_info: [{"id": "c42db3e1-640d-4925-b5a3-adb5ddbd8177", "address": "fa:16:3e:65:14:e0", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc42db3e1-64", "ovs_interfaceid": "c42db3e1-640d-4925-b5a3-adb5ddbd8177", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1633.349397] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e0d02e-d4bb-6736-fc2f-ef2b7c157265, 'name': SearchDatastore_Task, 'duration_secs': 0.023411} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.350169] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.350169] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1633.350312] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.350414] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.350587] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1633.350864] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-97c3f483-7dc2-4407-a210-b386be2d4149 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.363675] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1633.363838] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1633.364749] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22eb034e-ffc4-401c-8e61-609f37b06390 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.370286] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1633.370286] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f26768-efba-f377-54fd-5585fd19b23d" [ 1633.370286] env[63371]: _type = "Task" [ 1633.370286] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.378312] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f26768-efba-f377-54fd-5585fd19b23d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.502700] env[63371]: DEBUG oslo_concurrency.lockutils [None req-820fcf79-5e86-47aa-99fa-63b0c713d8b0 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1633.504306] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.849s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.506748] env[63371]: INFO nova.compute.claims [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1633.681678] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774350, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.709322] env[63371]: DEBUG nova.network.neutron [-] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1633.763022] env[63371]: DEBUG oslo_concurrency.lockutils [req-cb4c5de8-3089-4eaa-b58f-40f59d2dee17 req-0f7d5558-31b7-48ac-b11b-6a9995fb0331 service nova] Releasing lock "refresh_cache-40644960-1400-4dc6-9f2b-78afb7492a8d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.880867] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f26768-efba-f377-54fd-5585fd19b23d, 'name': SearchDatastore_Task, 'duration_secs': 0.020852} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.881736] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b21dc1a3-930a-4066-9f97-56ba490ae80c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.887198] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1633.887198] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ea5859-4428-4d0f-02a6-2cbac92829fc" [ 1633.887198] env[63371]: _type = "Task" [ 1633.887198] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.895796] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ea5859-4428-4d0f-02a6-2cbac92829fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.183134] env[63371]: DEBUG oslo_vmware.api [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774350, 'name': PowerOnVM_Task, 'duration_secs': 1.426132} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.183134] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1634.183134] env[63371]: INFO nova.compute.manager [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Took 8.88 seconds to spawn the instance on the hypervisor. 
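The instance_info_cache entries in this section embed the full Neutron VIF model inline as a JSON-like list, which makes them hard to scan. The snippet below is a small, self-contained sketch that pulls the usually interesting fields out of a trimmed copy of the entry logged for port c42db3e1-640d-4925-b5a3-adb5ddbd8177; it only illustrates the shape of the cached data, and summarize_vif is not a Nova API.

# Trimmed copy of the VIF entry from the "Updating instance_info_cache with
# network_info" lines above; only the fields read below are kept.
vif = {
    "id": "c42db3e1-640d-4925-b5a3-adb5ddbd8177",
    "address": "fa:16:3e:65:14:e0",
    "type": "ovs",
    "devname": "tapc42db3e1-64",
    "network": {
        "id": "336d5cea-eac3-4fb8-b2f7-25482e238702",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "version": 4},
            "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4}],
        }],
        "meta": {"mtu": 8950, "physical_network": "default"},
    },
}


def summarize_vif(vif):
    # Collect the fields an operator typically looks for in these cache dumps.
    subnet = vif["network"]["subnets"][0]
    fixed_ips = [ip["address"] for ip in subnet["ips"] if ip["type"] == "fixed"]
    return {
        "port_id": vif["id"],
        "mac": vif["address"],
        "fixed_ips": fixed_ips,
        "cidr": subnet["cidr"],
        "gateway": subnet["gateway"]["address"],
        "mtu": vif["network"]["meta"]["mtu"],
        "device": vif["devname"],
    }


print(summarize_vif(vif))
# {'port_id': 'c42db3e1-640d-4925-b5a3-adb5ddbd8177', 'mac': 'fa:16:3e:65:14:e0',
#  'fixed_ips': ['192.168.128.3'], 'cidr': '192.168.128.0/28',
#  'gateway': '192.168.128.1', 'mtu': 8950, 'device': 'tapc42db3e1-64'}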
[ 1634.183694] env[63371]: DEBUG nova.compute.manager [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1634.184151] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3633fc0-3cca-45f0-a105-47c519148d64 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.212859] env[63371]: INFO nova.compute.manager [-] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Took 1.38 seconds to deallocate network for instance. [ 1634.400176] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ea5859-4428-4d0f-02a6-2cbac92829fc, 'name': SearchDatastore_Task, 'duration_secs': 0.013407} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.400453] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1634.400797] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 40644960-1400-4dc6-9f2b-78afb7492a8d/40644960-1400-4dc6-9f2b-78afb7492a8d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1634.401016] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afc5c293-ac9b-4c4c-b909-c29483bb3cfd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.408380] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1634.408380] env[63371]: value = "task-1774351" [ 1634.408380] env[63371]: _type = "Task" [ 1634.408380] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.417285] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774351, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.453951] env[63371]: DEBUG nova.compute.manager [req-263f53ec-457f-418a-8278-b2069f277d07 req-d5b21c15-b1d5-4ac6-8ab5-94b909b8b00c service nova] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Received event network-vif-deleted-2a9f10ee-aa45-47a6-81cc-6a16a7e15445 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1634.704287] env[63371]: INFO nova.compute.manager [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Took 44.36 seconds to build instance. [ 1634.723087] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.914776] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e293e24-b3bc-45d8-ad36-80bd8ae33e08 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.921374] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774351, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.927699] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f680f5c6-2957-4802-b066-79416bdf4653 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.960438] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b408e5-15b1-418c-8ab2-bb8833544155 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.970069] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a426976-0f35-448a-bc49-3569cc4fa2fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.985105] env[63371]: DEBUG nova.compute.provider_tree [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1635.206738] env[63371]: DEBUG oslo_concurrency.lockutils [None req-496c5067-136f-4a93-8ca3-1d1b52d4b8fd tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "0cd2018f-7a54-4458-b5fd-353ab75ffbfd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.871s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.421283] env[63371]: DEBUG oslo_vmware.api [None 
req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774351, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597323} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.421544] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 40644960-1400-4dc6-9f2b-78afb7492a8d/40644960-1400-4dc6-9f2b-78afb7492a8d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1635.421778] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1635.422047] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a3aac9e9-5739-459a-a017-54fb3278299a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.428667] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1635.428667] env[63371]: value = "task-1774352" [ 1635.428667] env[63371]: _type = "Task" [ 1635.428667] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.436020] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774352, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.488719] env[63371]: DEBUG nova.scheduler.client.report [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1635.938893] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774352, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089835} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.939186] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1635.940040] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1382601-4476-42e9-bafe-dd9a81c6d98d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.962269] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 40644960-1400-4dc6-9f2b-78afb7492a8d/40644960-1400-4dc6-9f2b-78afb7492a8d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1635.962582] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee58fff0-ebd1-47ba-b2c9-d57915106512 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.986177] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1635.986177] env[63371]: value = "task-1774353" [ 1635.986177] env[63371]: _type = "Task" [ 1635.986177] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.995812] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.490s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.995812] env[63371]: DEBUG nova.compute.manager [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1635.997456] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774353, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.997715] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.657s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.997901] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.999746] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.114s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.999924] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.001584] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.965s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.001863] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.003306] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.770s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.003498] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.005029] env[63371]: DEBUG 
oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.007s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.006647] env[63371]: INFO nova.compute.claims [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1636.042143] env[63371]: INFO nova.scheduler.client.report [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted allocations for instance 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956 [ 1636.042143] env[63371]: INFO nova.scheduler.client.report [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleted allocations for instance e6cd62ce-f6d2-4e5b-acbc-7527a94e0932 [ 1636.056180] env[63371]: INFO nova.scheduler.client.report [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Deleted allocations for instance 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f [ 1636.064868] env[63371]: INFO nova.scheduler.client.report [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Deleted allocations for instance 3bd1c148-a48d-402c-bd76-2cb1d38b49f7 [ 1636.496570] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774353, 'name': ReconfigVM_Task, 'duration_secs': 0.347626} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.496958] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 40644960-1400-4dc6-9f2b-78afb7492a8d/40644960-1400-4dc6-9f2b-78afb7492a8d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1636.497589] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f432b047-a909-48cb-8b4e-da8f14ee7adc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.504466] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1636.504466] env[63371]: value = "task-1774354" [ 1636.504466] env[63371]: _type = "Task" [ 1636.504466] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.516316] env[63371]: DEBUG nova.compute.utils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1636.519630] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774354, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.520249] env[63371]: DEBUG nova.compute.manager [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1636.520420] env[63371]: DEBUG nova.network.neutron [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1636.554455] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d74a5d2-6573-4a4d-87de-8a33e15c3501 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e6cd62ce-f6d2-4e5b-acbc-7527a94e0932" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.342s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.556303] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5d3aae23-3700-4a51-b113-438ed10f8587 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "12e393d7-e8d5-4a9a-bad7-3cfffbb9d956" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.444s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.562948] env[63371]: DEBUG nova.policy [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31b76ca90f31495287b332ebb3001dff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e96348bcfea1455dad72945c7c36f027', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1636.569371] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dcd8cfc2-b9bd-4986-a201-b0fe3845c812 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.173s 
{{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.572704] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8c5cdf50-5354-4d82-a550-cafca5dffa10 tempest-ServersTestMultiNic-298145224 tempest-ServersTestMultiNic-298145224-project-member] Lock "3bd1c148-a48d-402c-bd76-2cb1d38b49f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.912s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.703033] env[63371]: DEBUG oslo_concurrency.lockutils [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "e912c210-3ae1-47ce-b9cd-afebf6195606" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.703426] env[63371]: DEBUG oslo_concurrency.lockutils [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e912c210-3ae1-47ce-b9cd-afebf6195606" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.703669] env[63371]: DEBUG oslo_concurrency.lockutils [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "e912c210-3ae1-47ce-b9cd-afebf6195606-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.703933] env[63371]: DEBUG oslo_concurrency.lockutils [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e912c210-3ae1-47ce-b9cd-afebf6195606-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.704180] env[63371]: DEBUG oslo_concurrency.lockutils [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e912c210-3ae1-47ce-b9cd-afebf6195606-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.706619] env[63371]: INFO nova.compute.manager [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Terminating instance [ 1636.711073] env[63371]: DEBUG nova.compute.manager [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1636.711399] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1636.712373] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774f5a85-7889-45df-b64d-b2b54eb0d349 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.720182] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1636.720474] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-701f6af5-f9bd-49e2-87c1-b34da791baaa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.726872] env[63371]: DEBUG oslo_vmware.api [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1636.726872] env[63371]: value = "task-1774355" [ 1636.726872] env[63371]: _type = "Task" [ 1636.726872] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.736516] env[63371]: DEBUG oslo_vmware.api [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774355, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.888115] env[63371]: DEBUG nova.network.neutron [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Successfully created port: 826bbbf2-7d7e-47d0-9516-4cb91c3d94a7 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1637.015374] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774354, 'name': Rename_Task, 'duration_secs': 0.177319} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.015737] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1637.016000] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8adda4d6-9493-4cb4-ad67-e9b3ced5a9c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.025703] env[63371]: DEBUG nova.compute.manager [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1637.027524] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1637.027524] env[63371]: value = "task-1774356" [ 1637.027524] env[63371]: _type = "Task" [ 1637.027524] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.040876] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774356, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.240450] env[63371]: DEBUG oslo_vmware.api [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774355, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.352277] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "0cd2018f-7a54-4458-b5fd-353ab75ffbfd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.352804] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "0cd2018f-7a54-4458-b5fd-353ab75ffbfd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.353170] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "0cd2018f-7a54-4458-b5fd-353ab75ffbfd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.355081] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "0cd2018f-7a54-4458-b5fd-353ab75ffbfd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.355081] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "0cd2018f-7a54-4458-b5fd-353ab75ffbfd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.357164] env[63371]: INFO nova.compute.manager [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Terminating instance [ 1637.359982] env[63371]: DEBUG nova.compute.manager [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1637.360355] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1637.362275] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f5a8fe-d664-4ce9-9421-e9923eb7c49b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.378808] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1637.378808] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fce60299-5030-4966-9ddf-f3a05e9ad726 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.384064] env[63371]: DEBUG oslo_vmware.api [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1637.384064] env[63371]: value = "task-1774357" [ 1637.384064] env[63371]: _type = "Task" [ 1637.384064] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.393665] env[63371]: DEBUG oslo_vmware.api [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774357, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.534422] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221bbec7-fd8c-4a0b-a54f-368f705d2ae0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.548776] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774356, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.548776] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba59935a-1b41-4ec2-9e4c-db15a496bec7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.584922] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c3bcee-7de2-491b-8c69-4e4cb5a5ccdf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.594097] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b416f9a4-7ac5-466c-a19e-17d99bcac53b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.598240] env[63371]: DEBUG oslo_concurrency.lockutils [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.598514] env[63371]: DEBUG oslo_concurrency.lockutils [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.598765] env[63371]: DEBUG oslo_concurrency.lockutils [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.598974] env[63371]: DEBUG oslo_concurrency.lockutils [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.599184] env[63371]: DEBUG oslo_concurrency.lockutils [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.601417] env[63371]: INFO nova.compute.manager [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Terminating instance [ 1637.604802] env[63371]: DEBUG nova.compute.manager [None 
req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1637.604802] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1637.605467] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719252fa-e775-42c2-89a4-21822965c60e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.617295] env[63371]: DEBUG nova.compute.provider_tree [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1637.623121] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1637.623409] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0353d5b3-6dfc-4fb9-a65c-8127cef412f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.630833] env[63371]: DEBUG oslo_vmware.api [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1637.630833] env[63371]: value = "task-1774358" [ 1637.630833] env[63371]: _type = "Task" [ 1637.630833] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.638682] env[63371]: DEBUG oslo_vmware.api [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774358, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.738967] env[63371]: DEBUG oslo_vmware.api [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774355, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.896678] env[63371]: DEBUG oslo_vmware.api [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774357, 'name': PowerOffVM_Task, 'duration_secs': 0.198478} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.896950] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1637.897130] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1637.897392] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48235c7d-9f24-46c3-ba29-b54128894166 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.040151] env[63371]: DEBUG nova.compute.manager [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1638.046688] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774356, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.068997] env[63371]: DEBUG nova.virt.hardware [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1638.069274] env[63371]: DEBUG nova.virt.hardware [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1638.069430] env[63371]: DEBUG nova.virt.hardware [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
1638.069609] env[63371]: DEBUG nova.virt.hardware [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1638.069782] env[63371]: DEBUG nova.virt.hardware [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1638.069936] env[63371]: DEBUG nova.virt.hardware [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1638.070165] env[63371]: DEBUG nova.virt.hardware [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1638.070322] env[63371]: DEBUG nova.virt.hardware [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1638.070505] env[63371]: DEBUG nova.virt.hardware [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1638.070641] env[63371]: DEBUG nova.virt.hardware [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1638.070808] env[63371]: DEBUG nova.virt.hardware [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1638.071744] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad510bc-069c-4054-995a-38ab7e2ea828 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.079595] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c55bfb-454c-4cb7-8ed7-9479728ed59f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.121836] env[63371]: DEBUG nova.scheduler.client.report [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Inventory has not changed for 
provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1638.141459] env[63371]: DEBUG oslo_vmware.api [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774358, 'name': PowerOffVM_Task, 'duration_secs': 0.286649} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.142187] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1638.142360] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1638.142613] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-748e15a6-edcd-44c1-9cd0-fad640500184 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.238752] env[63371]: DEBUG oslo_vmware.api [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774355, 'name': PowerOffVM_Task, 'duration_secs': 1.190554} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.239034] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1638.239222] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1638.239486] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-87253f0a-1f6d-4576-bb52-f66c687fa94b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.300611] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1638.300750] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1638.300949] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Deleting the datastore file [datastore1] 0cd2018f-7a54-4458-b5fd-353ab75ffbfd {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1638.301238] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6c4e8eb4-0078-4a0b-9ecd-d002cab366c1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.305302] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1638.305501] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1638.305772] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Deleting the datastore file [datastore1] f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed {{(pid=63371) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1638.306068] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5ab2275e-7fcc-4274-85b8-50de17bacd6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.311954] env[63371]: DEBUG oslo_vmware.api [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for the task: (returnval){ [ 1638.311954] env[63371]: value = "task-1774362" [ 1638.311954] env[63371]: _type = "Task" [ 1638.311954] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.313326] env[63371]: DEBUG oslo_vmware.api [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for the task: (returnval){ [ 1638.313326] env[63371]: value = "task-1774363" [ 1638.313326] env[63371]: _type = "Task" [ 1638.313326] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.325312] env[63371]: DEBUG oslo_vmware.api [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774362, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.332418] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1638.332684] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1638.332871] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleting the datastore file [datastore1] e912c210-3ae1-47ce-b9cd-afebf6195606 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1638.333230] env[63371]: DEBUG oslo_vmware.api [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774363, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.333479] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-810e2703-5f39-4a19-a1ec-42e40fc6669f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.344993] env[63371]: DEBUG oslo_vmware.api [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1638.344993] env[63371]: value = "task-1774364" [ 1638.344993] env[63371]: _type = "Task" [ 1638.344993] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.354724] env[63371]: DEBUG oslo_vmware.api [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774364, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.544481] env[63371]: DEBUG oslo_vmware.api [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774356, 'name': PowerOnVM_Task, 'duration_secs': 1.364176} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.545275] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1638.545537] env[63371]: INFO nova.compute.manager [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Took 8.57 seconds to spawn the instance on the hypervisor. 
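The scheduler report a few entries back lists the raw inventory Placement holds for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 (48 VCPU at allocation_ratio 4.0, 196590 MB RAM with 512 MB reserved, 400 GB disk). The following is an illustrative sketch, not output from this log, of the schedulable capacity those numbers imply, assuming Placement's usual rule of (total - reserved) * allocation_ratio per resource class:

# Illustrative only; inventory values copied from the log entry above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    # Assumed capacity rule: usable units scale with the allocation ratio.
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g}")
# VCPU: 192 / MEMORY_MB: 196078 / DISK_GB: 400
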
[ 1638.545733] env[63371]: DEBUG nova.compute.manager [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1638.546603] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-869d7cfd-930d-4075-b780-53bdb1840d15 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.627720] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.623s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.628254] env[63371]: DEBUG nova.compute.manager [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1638.635027] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.089s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.635027] env[63371]: INFO nova.compute.claims [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1638.744577] env[63371]: DEBUG nova.compute.manager [req-7c29f31a-4013-4964-a3fc-3f212d27fefb req-b2166816-0f25-438a-a09a-542d08f76b81 service nova] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Received event network-vif-plugged-826bbbf2-7d7e-47d0-9516-4cb91c3d94a7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1638.744809] env[63371]: DEBUG oslo_concurrency.lockutils [req-7c29f31a-4013-4964-a3fc-3f212d27fefb req-b2166816-0f25-438a-a09a-542d08f76b81 service nova] Acquiring lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.745036] env[63371]: DEBUG oslo_concurrency.lockutils [req-7c29f31a-4013-4964-a3fc-3f212d27fefb req-b2166816-0f25-438a-a09a-542d08f76b81 service nova] Lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.745304] env[63371]: DEBUG oslo_concurrency.lockutils [req-7c29f31a-4013-4964-a3fc-3f212d27fefb req-b2166816-0f25-438a-a09a-542d08f76b81 service nova] Lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.745420] env[63371]: DEBUG nova.compute.manager [req-7c29f31a-4013-4964-a3fc-3f212d27fefb req-b2166816-0f25-438a-a09a-542d08f76b81 service nova] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] No waiting events found dispatching network-vif-plugged-826bbbf2-7d7e-47d0-9516-4cb91c3d94a7 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1638.745530] env[63371]: WARNING nova.compute.manager [req-7c29f31a-4013-4964-a3fc-3f212d27fefb req-b2166816-0f25-438a-a09a-542d08f76b81 service nova] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Received unexpected event network-vif-plugged-826bbbf2-7d7e-47d0-9516-4cb91c3d94a7 for instance with vm_state building and task_state spawning. [ 1638.781632] env[63371]: DEBUG nova.network.neutron [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Successfully updated port: 826bbbf2-7d7e-47d0-9516-4cb91c3d94a7 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1638.828869] env[63371]: DEBUG oslo_vmware.api [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Task: {'id': task-1774363, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.339604} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.833429] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1638.833578] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1638.833674] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1638.833842] env[63371]: INFO nova.compute.manager [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1638.834130] env[63371]: DEBUG oslo.service.loopingcall [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1638.834337] env[63371]: DEBUG oslo_vmware.api [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Task: {'id': task-1774362, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.337603} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.834547] env[63371]: DEBUG nova.compute.manager [-] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1638.834644] env[63371]: DEBUG nova.network.neutron [-] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1638.837063] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1638.837261] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1638.837437] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1638.837604] env[63371]: INFO nova.compute.manager [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Took 1.48 seconds to destroy the instance on the hypervisor. [ 1638.837830] env[63371]: DEBUG oslo.service.loopingcall [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1638.838038] env[63371]: DEBUG nova.compute.manager [-] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1638.838137] env[63371]: DEBUG nova.network.neutron [-] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1638.856153] env[63371]: DEBUG oslo_vmware.api [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774364, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.32417} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.856386] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1638.856571] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1638.856823] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1638.857008] env[63371]: INFO nova.compute.manager [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Took 2.15 seconds to destroy the instance on the hypervisor. [ 1638.857280] env[63371]: DEBUG oslo.service.loopingcall [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1638.857427] env[63371]: DEBUG nova.compute.manager [-] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1638.857521] env[63371]: DEBUG nova.network.neutron [-] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1638.975812] env[63371]: DEBUG oslo_vmware.rw_handles [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52623404-7621-c678-0e77-687aca8dd4ef/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1638.976767] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db2d9a9-f4f1-4491-81c8-d15089c5efd0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.984622] env[63371]: DEBUG oslo_vmware.rw_handles [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52623404-7621-c678-0e77-687aca8dd4ef/disk-0.vmdk is in state: ready. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1638.984804] env[63371]: ERROR oslo_vmware.rw_handles [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52623404-7621-c678-0e77-687aca8dd4ef/disk-0.vmdk due to incomplete transfer. [ 1638.985037] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0967b56a-5e48-4633-b72c-b80f9802da63 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.992119] env[63371]: DEBUG oslo_vmware.rw_handles [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52623404-7621-c678-0e77-687aca8dd4ef/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1638.992316] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Uploaded image 1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1638.994738] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1638.995012] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-067b36d9-5dc0-4da5-9fdd-6113d96a3624 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.000669] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1639.000669] env[63371]: value = "task-1774365" [ 1639.000669] env[63371]: _type = "Task" [ 1639.000669] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.008570] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774365, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.064606] env[63371]: INFO nova.compute.manager [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Took 39.29 seconds to build instance. 
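Most of the vCenter traffic in this section follows one pattern: invoke a vSphere *_Task method through oslo.vmware, then poll the returned task until it completes, which is what produces the repeated "Waiting for the task" and "progress is N%" DEBUG entries (PowerOffVM_Task, PowerOnVM_Task, DeleteDatastoreFile_Task, Destroy_Task, RemoveSnapshot_Task). Below is a minimal sketch of that pattern, not Nova's actual code, assuming an already-authenticated oslo_vmware.api.VMwareAPISession and a VM managed-object reference obtained elsewhere (for example via SearchIndex.FindAllByUuid, as seen later in the log):

# Sketch only: the invoke-then-wait pattern behind entries like
# "Invoking VirtualMachine.PowerOffVM_Task ..." followed by
# "Task: {'id': ..., 'name': PowerOffVM_Task} progress is 0%".
from oslo_vmware import api

def power_off(session: api.VMwareAPISession, vm_ref):
    """Start PowerOffVM_Task for vm_ref and block until vCenter finishes it."""
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() polls the task, emitting the periodic progress DEBUG
    # lines, and raises if vCenter reports the task as failed.
    session.wait_for_task(task)

In the log itself this sequence is driven by nova.virt.vmwareapi.vm_util.power_off_instance, with oslo_vmware.api doing the polling.
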
[ 1639.142696] env[63371]: DEBUG nova.compute.utils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1639.149275] env[63371]: DEBUG nova.compute.manager [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1639.149275] env[63371]: DEBUG nova.network.neutron [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1639.242247] env[63371]: DEBUG nova.policy [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b57536ed1e3e48cf86a8ec224a0aa3d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '22fd1634f21c45efa8606cf6c339a790', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1639.247191] env[63371]: DEBUG nova.compute.manager [req-f0ceba61-dfe1-4175-9cfa-31059e823902 req-c14b66c5-725c-482b-aa46-ec6a064218be service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Received event network-vif-deleted-225db88c-9e6c-40e6-a30e-a3830f2c411c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1639.247454] env[63371]: INFO nova.compute.manager [req-f0ceba61-dfe1-4175-9cfa-31059e823902 req-c14b66c5-725c-482b-aa46-ec6a064218be service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Neutron deleted interface 225db88c-9e6c-40e6-a30e-a3830f2c411c; detaching it from the instance and deleting it from the info cache [ 1639.247758] env[63371]: DEBUG nova.network.neutron [req-f0ceba61-dfe1-4175-9cfa-31059e823902 req-c14b66c5-725c-482b-aa46-ec6a064218be service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.284578] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1639.284906] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1639.285134] env[63371]: DEBUG nova.network.neutron [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1639.511592] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774365, 'name': Destroy_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.524183] env[63371]: DEBUG nova.network.neutron [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Successfully created port: 8fb2ce38-fb30-464a-9fa3-42bd21ffe84c {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1639.565908] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1972f1e1-1a90-418d-84ba-39c662cc3b2c tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "40644960-1400-4dc6-9f2b-78afb7492a8d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.795s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1639.652268] env[63371]: DEBUG nova.compute.manager [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1639.701987] env[63371]: DEBUG nova.network.neutron [-] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.711770] env[63371]: DEBUG nova.network.neutron [-] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.712993] env[63371]: DEBUG nova.network.neutron [-] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.752599] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a83bba8f-ce6b-41aa-866a-cbd0109ab6a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.769416] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8adf77-c4b7-4bee-bd1b-3ed526b17ad7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.816038] env[63371]: DEBUG nova.compute.manager [req-f0ceba61-dfe1-4175-9cfa-31059e823902 req-c14b66c5-725c-482b-aa46-ec6a064218be service nova] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Detach interface failed, port_id=225db88c-9e6c-40e6-a30e-a3830f2c411c, reason: Instance e912c210-3ae1-47ce-b9cd-afebf6195606 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1639.884981] env[63371]: DEBUG nova.network.neutron [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1640.020017] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774365, 'name': Destroy_Task, 'duration_secs': 0.759673} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.022805] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Destroyed the VM [ 1640.023673] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1640.024888] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-623198a8-f352-408d-9c9a-598f34b96edb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.031570] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1640.031570] env[63371]: value = "task-1774366" [ 1640.031570] env[63371]: _type = "Task" [ 1640.031570] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.043360] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774366, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.072201] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b440c283-6c42-419c-8803-333332aadd6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.082566] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dfed220-7f67-47d1-9a56-8f96af7c5064 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.154322] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e0201f-8134-4463-8470-2ae719cb2727 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.174606] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a9bd24-c6ef-40b5-8282-8fc224325f85 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.201806] env[63371]: DEBUG nova.compute.provider_tree [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1640.207273] env[63371]: INFO nova.compute.manager [-] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Took 1.37 seconds to deallocate network for instance. 
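[editor's note] The surrounding entries show the vmwareapi driver creating vSphere tasks (Destroy_Task, RemoveSnapshot_Task, later CreateVM_Task) and then polling them until they report completion, logging the progress percentage and a final duration_secs. The sketch below is only a hypothetical illustration of that poll-until-done control flow; the `poll_progress` callable and `TaskTimeout` name are assumptions for the example and are not the oslo.vmware implementation.

```python
# Hypothetical sketch of the poll-until-done pattern visible in the log
# ("Task ... progress is N%" followed by "completed successfully").
# This is NOT the oslo.vmware code, just an illustration of the control flow.
import time


class TaskTimeout(Exception):
    """Raised when a task does not finish within the allowed time."""


def wait_for_task(poll_progress, interval=0.5, timeout=300.0):
    """Poll ``poll_progress()`` until it reports completion.

    ``poll_progress`` is an assumed callable returning ``(done, percent)``,
    standing in for a real vSphere task-info query.
    Returns the elapsed time, analogous to the ``duration_secs`` field above.
    """
    start = time.monotonic()
    while True:
        done, percent = poll_progress()
        print(f"progress is {percent}%")  # mirrors the DEBUG _poll_task lines
        if done:
            return time.monotonic() - start
        if time.monotonic() - start > timeout:
            raise TaskTimeout("task did not complete in time")
        time.sleep(interval)
```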
[ 1640.217133] env[63371]: DEBUG nova.network.neutron [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance_info_cache with network_info: [{"id": "826bbbf2-7d7e-47d0-9516-4cb91c3d94a7", "address": "fa:16:3e:67:7c:99", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap826bbbf2-7d", "ovs_interfaceid": "826bbbf2-7d7e-47d0-9516-4cb91c3d94a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1640.217133] env[63371]: INFO nova.compute.manager [-] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Took 1.38 seconds to deallocate network for instance. [ 1640.217275] env[63371]: INFO nova.compute.manager [-] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Took 1.36 seconds to deallocate network for instance. [ 1640.543105] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774366, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.665313] env[63371]: DEBUG nova.compute.manager [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1640.691647] env[63371]: DEBUG nova.virt.hardware [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1640.691967] env[63371]: DEBUG nova.virt.hardware [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1640.692149] env[63371]: DEBUG nova.virt.hardware [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1640.692355] env[63371]: DEBUG nova.virt.hardware [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1640.692510] env[63371]: DEBUG nova.virt.hardware [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1640.692664] env[63371]: DEBUG nova.virt.hardware [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1640.692900] env[63371]: DEBUG nova.virt.hardware [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1640.693105] env[63371]: DEBUG nova.virt.hardware [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1640.693339] env[63371]: DEBUG nova.virt.hardware [None 
req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1640.693545] env[63371]: DEBUG nova.virt.hardware [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1640.693756] env[63371]: DEBUG nova.virt.hardware [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1640.694759] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bf26f30-d4b1-4e4a-95df-96faaae46f6f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.702852] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89caa9ef-23ac-4b9a-9b44-f89b1aa3cd83 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.707515] env[63371]: DEBUG nova.scheduler.client.report [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1640.722964] env[63371]: DEBUG oslo_concurrency.lockutils [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.723228] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1640.723898] env[63371]: DEBUG nova.compute.manager [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Instance network_info: |[{"id": "826bbbf2-7d7e-47d0-9516-4cb91c3d94a7", "address": "fa:16:3e:67:7c:99", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap826bbbf2-7d", "ovs_interfaceid": "826bbbf2-7d7e-47d0-9516-4cb91c3d94a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1640.724356] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.724635] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:67:7c:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '826bbbf2-7d7e-47d0-9516-4cb91c3d94a7', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1640.732432] env[63371]: DEBUG oslo.service.loopingcall [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1640.733878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.734155] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1640.734591] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-00d9bc25-7f8b-4b04-b2c0-9cb4071ed9d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.754762] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1640.754762] env[63371]: value = "task-1774367" [ 1640.754762] env[63371]: _type = "Task" [ 1640.754762] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.765879] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774367, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.787796] env[63371]: DEBUG nova.compute.manager [req-cd29859e-c23a-41a1-81ae-b756904683c6 req-69fc3866-e978-4305-ab77-b1a97416f2d9 service nova] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Received event network-changed-826bbbf2-7d7e-47d0-9516-4cb91c3d94a7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1640.788056] env[63371]: DEBUG nova.compute.manager [req-cd29859e-c23a-41a1-81ae-b756904683c6 req-69fc3866-e978-4305-ab77-b1a97416f2d9 service nova] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Refreshing instance network info cache due to event network-changed-826bbbf2-7d7e-47d0-9516-4cb91c3d94a7. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1640.788342] env[63371]: DEBUG oslo_concurrency.lockutils [req-cd29859e-c23a-41a1-81ae-b756904683c6 req-69fc3866-e978-4305-ab77-b1a97416f2d9 service nova] Acquiring lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.788521] env[63371]: DEBUG oslo_concurrency.lockutils [req-cd29859e-c23a-41a1-81ae-b756904683c6 req-69fc3866-e978-4305-ab77-b1a97416f2d9 service nova] Acquired lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.788737] env[63371]: DEBUG nova.network.neutron [req-cd29859e-c23a-41a1-81ae-b756904683c6 req-69fc3866-e978-4305-ab77-b1a97416f2d9 service nova] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Refreshing network info cache for port 826bbbf2-7d7e-47d0-9516-4cb91c3d94a7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1641.035844] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "40644960-1400-4dc6-9f2b-78afb7492a8d" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.035941] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "40644960-1400-4dc6-9f2b-78afb7492a8d" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.036098] env[63371]: INFO nova.compute.manager [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Shelving [ 1641.050417] env[63371]: DEBUG oslo_vmware.api [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774366, 'name': RemoveSnapshot_Task, 'duration_secs': 0.699578} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.051335] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1641.051651] env[63371]: INFO nova.compute.manager [None req-25575695-a107-4f0a-bf89-9e6fdf5fe02f tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Took 15.67 seconds to snapshot the instance on the hypervisor. 
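[editor's note] The oslo_concurrency.lockutils entries above ("Acquiring lock ...", "acquired ... :: waited ...s", "released ... :: held ...s") record how long each caller waited for and held a named lock while serializing code paths such as build_and_run_instance, shelve_instance and the resource-tracker claim. Below is a minimal stand-in built only on the standard library to illustrate that kind of instrumented named lock; it is an assumption-laden sketch, not the oslo.concurrency implementation.

```python
# Minimal illustration (standard library only) of a named lock that logs
# wait/held timings like the oslo_concurrency.lockutils entries above.
# Simplified stand-in, not the oslo.concurrency code.
import contextlib
import threading
import time
from collections import defaultdict

_locks = defaultdict(threading.Lock)  # one lock object per name


@contextlib.contextmanager
def named_lock(name, caller):
    lock = _locks[name]
    print(f'Acquiring lock "{name}" by "{caller}"')
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


# Example: serialize a resource claim the way the log shows the
# compute manager doing with the "compute_resources" lock.
with named_lock("compute_resources", "ResourceTracker.instance_claim"):
    pass  # perform the claim while holding the lock
```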
[ 1641.138045] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "ee3ea0ef-cde9-4326-b564-1aa216e00751" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.138045] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "ee3ea0ef-cde9-4326-b564-1aa216e00751" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.184566] env[63371]: DEBUG nova.network.neutron [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Successfully updated port: 8fb2ce38-fb30-464a-9fa3-42bd21ffe84c {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1641.213147] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.581s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.213650] env[63371]: DEBUG nova.compute.manager [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1641.216073] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.118s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.217437] env[63371]: INFO nova.compute.claims [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1641.265836] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774367, 'name': CreateVM_Task, 'duration_secs': 0.359775} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.265836] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1641.266276] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.266442] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.266797] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1641.267136] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b08494b-3cb1-4f36-93ea-2c14ff129380 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.271825] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1641.271825] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52461c46-2c21-d363-d8ad-bbdf864d956f" [ 1641.271825] env[63371]: _type = "Task" [ 1641.271825] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.276801] env[63371]: DEBUG nova.compute.manager [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Received event network-vif-plugged-8fb2ce38-fb30-464a-9fa3-42bd21ffe84c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1641.277219] env[63371]: DEBUG oslo_concurrency.lockutils [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] Acquiring lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1641.277264] env[63371]: DEBUG oslo_concurrency.lockutils [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] Lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1641.277414] env[63371]: DEBUG oslo_concurrency.lockutils [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] Lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.277577] env[63371]: DEBUG nova.compute.manager [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] No waiting events found dispatching network-vif-plugged-8fb2ce38-fb30-464a-9fa3-42bd21ffe84c {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1641.277735] env[63371]: WARNING nova.compute.manager [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Received unexpected event network-vif-plugged-8fb2ce38-fb30-464a-9fa3-42bd21ffe84c for instance with vm_state building and task_state spawning. [ 1641.277888] env[63371]: DEBUG nova.compute.manager [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Received event network-changed-8fb2ce38-fb30-464a-9fa3-42bd21ffe84c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1641.278044] env[63371]: DEBUG nova.compute.manager [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Refreshing instance network info cache due to event network-changed-8fb2ce38-fb30-464a-9fa3-42bd21ffe84c. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1641.278222] env[63371]: DEBUG oslo_concurrency.lockutils [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] Acquiring lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.278352] env[63371]: DEBUG oslo_concurrency.lockutils [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] Acquired lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.278499] env[63371]: DEBUG nova.network.neutron [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Refreshing network info cache for port 8fb2ce38-fb30-464a-9fa3-42bd21ffe84c {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1641.284414] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52461c46-2c21-d363-d8ad-bbdf864d956f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.477198] env[63371]: DEBUG nova.network.neutron [req-cd29859e-c23a-41a1-81ae-b756904683c6 req-69fc3866-e978-4305-ab77-b1a97416f2d9 service nova] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updated VIF entry in instance network info cache for port 826bbbf2-7d7e-47d0-9516-4cb91c3d94a7. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1641.477592] env[63371]: DEBUG nova.network.neutron [req-cd29859e-c23a-41a1-81ae-b756904683c6 req-69fc3866-e978-4305-ab77-b1a97416f2d9 service nova] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance_info_cache with network_info: [{"id": "826bbbf2-7d7e-47d0-9516-4cb91c3d94a7", "address": "fa:16:3e:67:7c:99", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap826bbbf2-7d", "ovs_interfaceid": "826bbbf2-7d7e-47d0-9516-4cb91c3d94a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.549654] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1641.549904] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76dc8b4f-98e1-4854-a713-67b6ec1fd838 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.558079] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1641.558079] env[63371]: value = "task-1774368" [ 1641.558079] env[63371]: _type = "Task" [ 1641.558079] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.566627] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774368, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.641088] env[63371]: DEBUG nova.compute.manager [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1641.687305] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.722016] env[63371]: DEBUG nova.compute.utils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1641.725817] env[63371]: DEBUG nova.compute.manager [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1641.726014] env[63371]: DEBUG nova.network.neutron [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1641.787611] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52461c46-2c21-d363-d8ad-bbdf864d956f, 'name': SearchDatastore_Task, 'duration_secs': 0.054385} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.789509] env[63371]: DEBUG nova.policy [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '14b201b8d738471295e655e2ee2cad8d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2e693d73d70140c2ba065de2b60838c2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1641.791342] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.791651] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1641.791972] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.792196] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.792725] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1641.793186] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95a834d4-4c70-4a9a-ba98-b73880e1982f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.802366] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1641.802543] env[63371]: DEBUG 
nova.virt.vmwareapi.vmops [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1641.803881] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1c358a5-09a9-4417-aa61-3c20ebe44fab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.810216] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1641.810216] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523d67dc-ca77-fb85-f0ff-f00aee814939" [ 1641.810216] env[63371]: _type = "Task" [ 1641.810216] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.820561] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523d67dc-ca77-fb85-f0ff-f00aee814939, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.835980] env[63371]: DEBUG nova.network.neutron [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1641.973651] env[63371]: DEBUG nova.network.neutron [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.979739] env[63371]: DEBUG oslo_concurrency.lockutils [req-cd29859e-c23a-41a1-81ae-b756904683c6 req-69fc3866-e978-4305-ab77-b1a97416f2d9 service nova] Releasing lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.979955] env[63371]: DEBUG nova.compute.manager [req-cd29859e-c23a-41a1-81ae-b756904683c6 req-69fc3866-e978-4305-ab77-b1a97416f2d9 service nova] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Received event network-vif-deleted-3eac2e62-a172-4d1d-bc6b-d5d8b5849cc1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1641.980163] env[63371]: DEBUG nova.compute.manager [req-cd29859e-c23a-41a1-81ae-b756904683c6 req-69fc3866-e978-4305-ab77-b1a97416f2d9 service nova] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Received event network-vif-deleted-91e0886d-8e37-4f74-9933-1f1b49ae72e4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1642.069285] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774368, 'name': PowerOffVM_Task, 'duration_secs': 0.253972} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.069548] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1642.070323] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec5d767b-c0ab-48be-9478-4737092ffe3a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.088714] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b9e1b6-efc9-4408-a214-bd13af6c6d99 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.121526] env[63371]: DEBUG nova.network.neutron [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Successfully created port: 165046e2-6d39-4a5c-9e2f-57619d3c8309 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1642.164820] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.227172] env[63371]: DEBUG nova.compute.manager [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1642.322258] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523d67dc-ca77-fb85-f0ff-f00aee814939, 'name': SearchDatastore_Task, 'duration_secs': 0.031066} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.323107] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28ff4ac7-a6ae-4469-91c3-1ce87610ae3c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.330246] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1642.330246] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52db693d-eea6-fb22-a2c1-443ee107423a" [ 1642.330246] env[63371]: _type = "Task" [ 1642.330246] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.337693] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52db693d-eea6-fb22-a2c1-443ee107423a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.475975] env[63371]: DEBUG oslo_concurrency.lockutils [req-13522257-baea-4eb1-98db-0f98099548da req-f8710ec3-0f0a-45a5-989c-a14601a07893 service nova] Releasing lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.476348] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquired lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1642.476508] env[63371]: DEBUG nova.network.neutron [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1642.527875] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49cde630-ff36-48e8-951e-eddb377fd323 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.535283] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a412a58c-569e-4dd1-946d-7d5fc58546a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.566362] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a41bdd-065b-40e3-a69d-36cfcbd7a132 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.572997] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5883908-d997-47df-83a9-eec528cf4c08 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.586062] env[63371]: DEBUG nova.compute.provider_tree [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1642.602207] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1642.602427] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-84ab0ee0-755a-48d3-8814-c7f122562e77 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.608513] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 
1642.608513] env[63371]: value = "task-1774369" [ 1642.608513] env[63371]: _type = "Task" [ 1642.608513] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.616745] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774369, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.841043] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52db693d-eea6-fb22-a2c1-443ee107423a, 'name': SearchDatastore_Task, 'duration_secs': 0.009893} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.841326] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1642.841578] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e1bc4623-f6b5-4440-a58d-594e9cbe3628/e1bc4623-f6b5-4440-a58d-594e9cbe3628.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1642.841894] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-014a0055-0b79-4d3a-bdc4-46e1635576c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.848763] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1642.848763] env[63371]: value = "task-1774370" [ 1642.848763] env[63371]: _type = "Task" [ 1642.848763] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.856386] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774370, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.006674] env[63371]: DEBUG nova.network.neutron [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1643.088928] env[63371]: DEBUG nova.scheduler.client.report [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1643.120201] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774369, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.178567] env[63371]: DEBUG nova.network.neutron [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Updating instance_info_cache with network_info: [{"id": "8fb2ce38-fb30-464a-9fa3-42bd21ffe84c", "address": "fa:16:3e:61:26:f7", "network": {"id": "1dc1e53b-b865-4642-b667-e771524c6438", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-562533890-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fd1634f21c45efa8606cf6c339a790", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fb2ce38-fb", "ovs_interfaceid": "8fb2ce38-fb30-464a-9fa3-42bd21ffe84c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1643.240727] env[63371]: DEBUG nova.compute.manager [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1643.272408] env[63371]: DEBUG nova.virt.hardware [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1643.272590] env[63371]: DEBUG nova.virt.hardware [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1643.272771] env[63371]: DEBUG nova.virt.hardware [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1643.273028] env[63371]: DEBUG nova.virt.hardware [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1643.273489] env[63371]: DEBUG nova.virt.hardware [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1643.273489] env[63371]: DEBUG nova.virt.hardware [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1643.273489] env[63371]: DEBUG nova.virt.hardware [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1643.273638] env[63371]: DEBUG nova.virt.hardware [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1643.273799] env[63371]: DEBUG 
nova.virt.hardware [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1643.274378] env[63371]: DEBUG nova.virt.hardware [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1643.274378] env[63371]: DEBUG nova.virt.hardware [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1643.275044] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482e3ef2-5f1f-4647-a83f-9a9700aa134e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.283656] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa944324-6de5-4b53-9975-66f0726f26e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.360555] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774370, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489153} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.360815] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e1bc4623-f6b5-4440-a58d-594e9cbe3628/e1bc4623-f6b5-4440-a58d-594e9cbe3628.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1643.361022] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1643.361285] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-760a3180-1a13-4c0e-a9a4-fd69db83c2f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.367093] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1643.367093] env[63371]: value = "task-1774371" [ 1643.367093] env[63371]: _type = "Task" [ 1643.367093] env[63371]: } to complete. 
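The recurring "Waiting for the task ... to complete" and "progress is N%" records (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, CreateSnapshot_Task, and later ReconfigVM_Task, CloneVM_Task and PowerOnVM_Task) are all emitted by oslo.vmware's task-polling helpers. A minimal sketch of that pattern outside of Nova; the endpoint, credentials and datastore paths are placeholders, and on vCenter the datacenter morefs would normally be supplied as well:

    from oslo_vmware import api

    # Placeholder endpoint and credentials; Nova reads the real values from
    # the [vmware] section of nova.conf.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # invoke_api() issues the SOAP call and returns a task reference; this
    # mirrors the VirtualDiskManager.CopyVirtualDisk_Task invocations above.
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName='[datastore1] cache/base.vmdk',
                              destName='[datastore1] instance/instance.vmdk')

    # wait_for_task() polls the task reference; each poll corresponds to one
    # "progress is N%" line, and the call returns (or raises) once the task
    # reaches a terminal state, producing the "completed successfully" record.
    task_info = session.wait_for_task(task)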
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.374773] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774371, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.532047] env[63371]: DEBUG nova.compute.manager [req-05a12002-57ec-4e2c-bc53-1c0f113768c5 req-0f0068b5-ba0a-4dd1-aa8c-bf89a900ed71 service nova] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Received event network-vif-plugged-165046e2-6d39-4a5c-9e2f-57619d3c8309 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1643.532275] env[63371]: DEBUG oslo_concurrency.lockutils [req-05a12002-57ec-4e2c-bc53-1c0f113768c5 req-0f0068b5-ba0a-4dd1-aa8c-bf89a900ed71 service nova] Acquiring lock "3065fc71-f127-43b7-83b7-70140f29965b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1643.532479] env[63371]: DEBUG oslo_concurrency.lockutils [req-05a12002-57ec-4e2c-bc53-1c0f113768c5 req-0f0068b5-ba0a-4dd1-aa8c-bf89a900ed71 service nova] Lock "3065fc71-f127-43b7-83b7-70140f29965b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.532644] env[63371]: DEBUG oslo_concurrency.lockutils [req-05a12002-57ec-4e2c-bc53-1c0f113768c5 req-0f0068b5-ba0a-4dd1-aa8c-bf89a900ed71 service nova] Lock "3065fc71-f127-43b7-83b7-70140f29965b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.532808] env[63371]: DEBUG nova.compute.manager [req-05a12002-57ec-4e2c-bc53-1c0f113768c5 req-0f0068b5-ba0a-4dd1-aa8c-bf89a900ed71 service nova] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] No waiting events found dispatching network-vif-plugged-165046e2-6d39-4a5c-9e2f-57619d3c8309 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1643.532969] env[63371]: WARNING nova.compute.manager [req-05a12002-57ec-4e2c-bc53-1c0f113768c5 req-0f0068b5-ba0a-4dd1-aa8c-bf89a900ed71 service nova] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Received unexpected event network-vif-plugged-165046e2-6d39-4a5c-9e2f-57619d3c8309 for instance with vm_state building and task_state spawning. [ 1643.594695] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.379s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.595230] env[63371]: DEBUG nova.compute.manager [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1643.597744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.875s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.598015] env[63371]: DEBUG nova.objects.instance [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lazy-loading 'resources' on Instance uuid 3027832f-12cd-4255-b699-bcbb254a6c5a {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1643.614939] env[63371]: DEBUG nova.network.neutron [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Successfully updated port: 165046e2-6d39-4a5c-9e2f-57619d3c8309 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1643.619281] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774369, 'name': CreateSnapshot_Task, 'duration_secs': 0.52159} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.619721] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1643.620474] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225b968c-bb25-41cf-af32-0957d2e4f8ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.681457] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Releasing lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.681810] env[63371]: DEBUG nova.compute.manager [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Instance network_info: |[{"id": "8fb2ce38-fb30-464a-9fa3-42bd21ffe84c", "address": "fa:16:3e:61:26:f7", "network": {"id": "1dc1e53b-b865-4642-b667-e771524c6438", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-562533890-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fd1634f21c45efa8606cf6c339a790", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fb2ce38-fb", "ovs_interfaceid": "8fb2ce38-fb30-464a-9fa3-42bd21ffe84c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1643.682294] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:26:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d2e4070-a78e-4d08-a104-b6312ab65577', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8fb2ce38-fb30-464a-9fa3-42bd21ffe84c', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1643.689728] env[63371]: DEBUG oslo.service.loopingcall [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1643.689937] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1643.690398] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1bcd679b-952c-45ad-9338-822c0794ab4d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.709396] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1643.709396] env[63371]: value = "task-1774372" [ 1643.709396] env[63371]: _type = "Task" [ 1643.709396] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.718239] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774372, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.876745] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774371, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063476} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.877049] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1643.877886] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39c98fc-e6ed-4ce3-8218-fe2f25b992c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.899623] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] e1bc4623-f6b5-4440-a58d-594e9cbe3628/e1bc4623-f6b5-4440-a58d-594e9cbe3628.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1643.899913] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dc67392b-8b2a-4173-9647-7702b287f6c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.918275] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1643.918275] env[63371]: value = "task-1774373" [ 1643.918275] env[63371]: _type = "Task" [ 1643.918275] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.925956] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774373, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.101336] env[63371]: DEBUG nova.compute.utils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1644.105902] env[63371]: DEBUG nova.compute.manager [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1644.105902] env[63371]: DEBUG nova.network.neutron [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1644.120880] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "refresh_cache-3065fc71-f127-43b7-83b7-70140f29965b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1644.121041] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired lock "refresh_cache-3065fc71-f127-43b7-83b7-70140f29965b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.121186] env[63371]: DEBUG nova.network.neutron [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1644.138176] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1644.140905] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3f6346d1-5858-4462-bd2d-a41a8e3787b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.148613] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1644.148613] env[63371]: value = "task-1774374" [ 1644.148613] env[63371]: _type = "Task" [ 1644.148613] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.149934] env[63371]: DEBUG nova.policy [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9bddd6cb483141dea99b67330700bc2d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f5c0a56fc6994e8b94d4bc92ab09726e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1644.163878] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774374, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.221900] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774372, 'name': CreateVM_Task, 'duration_secs': 0.396986} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.224723] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1644.224723] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1644.224723] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.224723] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1644.224723] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b82872f-dc5b-4b5c-8fb9-d717271e5093 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.231108] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1644.231108] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5261e802-54e6-74cc-1590-0278eb1ec51e" [ 1644.231108] 
env[63371]: _type = "Task" [ 1644.231108] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.238835] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5261e802-54e6-74cc-1590-0278eb1ec51e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.429342] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774373, 'name': ReconfigVM_Task, 'duration_secs': 0.3086} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.430551] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Reconfigured VM instance instance-0000004c to attach disk [datastore1] e1bc4623-f6b5-4440-a58d-594e9cbe3628/e1bc4623-f6b5-4440-a58d-594e9cbe3628.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1644.431731] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37002c68-2081-4dc5-8ece-2e9291a5ef62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.434298] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff8af002-1b64-44f6-878b-9440605ff247 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.442228] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af40674f-30fc-4bde-9647-3ef97120bf1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.445383] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1644.445383] env[63371]: value = "task-1774375" [ 1644.445383] env[63371]: _type = "Task" [ 1644.445383] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.472838] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ffb63c-4806-4c8e-93ed-4de8f82dfd54 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.478837] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774375, 'name': Rename_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.483403] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89a9aebc-e3da-474c-949c-637f2bd02b9e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.487921] env[63371]: DEBUG nova.network.neutron [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Successfully created port: e6dc9116-b799-4666-8abb-a5e2ab4749e5 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1644.499402] env[63371]: DEBUG nova.compute.provider_tree [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1644.606539] env[63371]: DEBUG nova.compute.manager [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1644.660232] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774374, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.666564] env[63371]: DEBUG nova.network.neutron [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1644.741730] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5261e802-54e6-74cc-1590-0278eb1ec51e, 'name': SearchDatastore_Task, 'duration_secs': 0.027434} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.742076] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1644.742312] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1644.742541] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1644.742681] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1644.742871] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1644.743128] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9faff6b5-3709-429d-a498-8f7383788396 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.751106] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1644.751282] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Folder [datastore1] devstack-image-cache_base created. 
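The Acquiring/Acquired/Releasing records around the devstack-image-cache_base paths are oslo.concurrency lock transitions: access to the shared image cache is serialized so that only one worker copies or processes a given base VMDK at a time. A minimal sketch of that locking pattern; the cache path and the copy callable are placeholders, not Nova's actual helpers:

    from oslo_concurrency import lockutils

    CACHE_VMDK = ('[datastore1] devstack-image-cache_base/'
                  '<image-id>/<image-id>.vmdk')   # placeholder cache path

    def ensure_cached(copy_base_image):
        # Entering and leaving lockutils.lock() is what produces the
        # "Acquiring lock" / "Acquired lock" / "Releasing lock" DEBUG lines.
        with lockutils.lock(CACHE_VMDK):
            copy_base_image(CACHE_VMDK)   # only one worker runs this at a time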
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1644.752025] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e20892e9-6a69-4ab0-ab07-3a1beb29d59a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.757072] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1644.757072] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520c99ae-9a26-9a1b-7f16-7f82a83308ea" [ 1644.757072] env[63371]: _type = "Task" [ 1644.757072] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.764363] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520c99ae-9a26-9a1b-7f16-7f82a83308ea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.868479] env[63371]: DEBUG nova.network.neutron [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Updating instance_info_cache with network_info: [{"id": "165046e2-6d39-4a5c-9e2f-57619d3c8309", "address": "fa:16:3e:0a:5b:2b", "network": {"id": "9c25e5e9-468d-4d4c-93e0-c9815eff1c2e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-814005109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e693d73d70140c2ba065de2b60838c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165046e2-6d", "ovs_interfaceid": "165046e2-6d39-4a5c-9e2f-57619d3c8309", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1644.956206] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774375, 'name': Rename_Task, 'duration_secs': 0.151114} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.956487] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1644.956733] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-445144e6-ee20-4f16-8f56-06e8fcfc7f6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.963399] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1644.963399] env[63371]: value = "task-1774376" [ 1644.963399] env[63371]: _type = "Task" [ 1644.963399] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.970892] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774376, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.003365] env[63371]: DEBUG nova.scheduler.client.report [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1645.161194] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774374, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.267749] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520c99ae-9a26-9a1b-7f16-7f82a83308ea, 'name': SearchDatastore_Task, 'duration_secs': 0.010492} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.268590] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d569fe82-d4c3-471e-b19d-a977bf1a2dcc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.273949] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1645.273949] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521632e1-e8ed-5234-f3a9-0549b98b69ca" [ 1645.273949] env[63371]: _type = "Task" [ 1645.273949] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.282038] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521632e1-e8ed-5234-f3a9-0549b98b69ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.370820] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Releasing lock "refresh_cache-3065fc71-f127-43b7-83b7-70140f29965b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1645.371245] env[63371]: DEBUG nova.compute.manager [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Instance network_info: |[{"id": "165046e2-6d39-4a5c-9e2f-57619d3c8309", "address": "fa:16:3e:0a:5b:2b", "network": {"id": "9c25e5e9-468d-4d4c-93e0-c9815eff1c2e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-814005109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e693d73d70140c2ba065de2b60838c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165046e2-6d", "ovs_interfaceid": "165046e2-6d39-4a5c-9e2f-57619d3c8309", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1645.371747] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:5b:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7514a465-f1a4-4a8b-b76b-726b1a9d7e2f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '165046e2-6d39-4a5c-9e2f-57619d3c8309', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1645.379991] env[63371]: DEBUG oslo.service.loopingcall [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1645.380239] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1645.380474] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1dda019-fb90-4b4d-ac27-1482826c253b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.399925] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1645.399925] env[63371]: value = "task-1774377" [ 1645.399925] env[63371]: _type = "Task" [ 1645.399925] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.407494] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774377, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.473462] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774376, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.508459] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.911s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1645.510796] env[63371]: DEBUG oslo_concurrency.lockutils [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.788s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1645.511044] env[63371]: DEBUG nova.objects.instance [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lazy-loading 'resources' on Instance uuid f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1645.532746] env[63371]: INFO nova.scheduler.client.report [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted allocations for instance 3027832f-12cd-4255-b699-bcbb254a6c5a [ 1645.563033] env[63371]: DEBUG nova.compute.manager [req-f9e9b341-64a9-4f51-86f9-97d35a7b5800 req-49414146-de99-4af7-a542-44ce0d76e124 service nova] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Received event network-changed-165046e2-6d39-4a5c-9e2f-57619d3c8309 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1645.563033] env[63371]: DEBUG nova.compute.manager [req-f9e9b341-64a9-4f51-86f9-97d35a7b5800 req-49414146-de99-4af7-a542-44ce0d76e124 service nova] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Refreshing instance network info cache due to event network-changed-165046e2-6d39-4a5c-9e2f-57619d3c8309. 
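The "compute_resources" lock records, with their waited/held timings (instance_claim held 2.379s, update_usage waited 8.875s then held 1.911s), come from the same oslo.concurrency machinery used in decorator form: every claim and usage update on the host runs under a single lock. A minimal sketch of that pattern; the class and method bodies are placeholders, not Nova's resource tracker:

    from oslo_concurrency import lockutils

    class Tracker:   # placeholder, not Nova's ResourceTracker
        @lockutils.synchronized('compute_resources')
        def instance_claim(self, instance_uuid, flavor):
            pass   # reserve resources for the new instance

        @lockutils.synchronized('compute_resources')
        def update_usage(self, instance_uuid):
            # All claims and usage updates share one lock, which is why the
            # log reports how long each caller waited for and then held it.
            pass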
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1645.563033] env[63371]: DEBUG oslo_concurrency.lockutils [req-f9e9b341-64a9-4f51-86f9-97d35a7b5800 req-49414146-de99-4af7-a542-44ce0d76e124 service nova] Acquiring lock "refresh_cache-3065fc71-f127-43b7-83b7-70140f29965b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1645.563033] env[63371]: DEBUG oslo_concurrency.lockutils [req-f9e9b341-64a9-4f51-86f9-97d35a7b5800 req-49414146-de99-4af7-a542-44ce0d76e124 service nova] Acquired lock "refresh_cache-3065fc71-f127-43b7-83b7-70140f29965b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1645.563033] env[63371]: DEBUG nova.network.neutron [req-f9e9b341-64a9-4f51-86f9-97d35a7b5800 req-49414146-de99-4af7-a542-44ce0d76e124 service nova] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Refreshing network info cache for port 165046e2-6d39-4a5c-9e2f-57619d3c8309 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1645.620991] env[63371]: DEBUG nova.compute.manager [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1645.649761] env[63371]: DEBUG nova.virt.hardware [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1645.650022] env[63371]: DEBUG nova.virt.hardware [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1645.650179] env[63371]: DEBUG nova.virt.hardware [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1645.650357] env[63371]: DEBUG nova.virt.hardware [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1645.650501] env[63371]: DEBUG nova.virt.hardware [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1645.651131] env[63371]: DEBUG nova.virt.hardware [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1645.651131] env[63371]: DEBUG nova.virt.hardware [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1645.651131] env[63371]: DEBUG nova.virt.hardware [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1645.651275] env[63371]: DEBUG nova.virt.hardware [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1645.651306] env[63371]: DEBUG nova.virt.hardware [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1645.651469] env[63371]: DEBUG nova.virt.hardware [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1645.652654] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50703e2-ee24-4e1b-94d8-dfbfcad9d051 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.666730] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb7755d-f3c6-4fe9-a42d-1e9e4fcd5f44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.671312] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774374, 'name': CloneVM_Task} progress is 100%. 
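The CPU-topology records above (flavor/image limits and prefs of 0:0:0, "Build topologies for 1 vcpu(s) 1:1:1", and a single possible VirtCPUTopology(cores=1,sockets=1,threads=1)) summarize how candidate topologies are enumerated for the m1.nano flavor. A simplified sketch of that enumeration; Nova's implementation applies further constraints, but the core idea is that every (sockets, cores, threads) triple whose product equals the vCPU count and respects the maxima is a candidate:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Yield every factorization of the vCPU count within the limits.
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        yield (s, c, t)

    print(list(possible_topologies(1)))   # [(1, 1, 1)] -> the one topology logged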
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.784196] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521632e1-e8ed-5234-f3a9-0549b98b69ca, 'name': SearchDatastore_Task, 'duration_secs': 0.015173} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.784483] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1645.784722] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3c6294ae-9a16-4f1e-abd4-1aec224625ac/3c6294ae-9a16-4f1e-abd4-1aec224625ac.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1645.784975] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee517709-96a7-4aa5-aa46-21ebbf0b3635 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.792913] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1645.792913] env[63371]: value = "task-1774378" [ 1645.792913] env[63371]: _type = "Task" [ 1645.792913] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.800487] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774378, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.910374] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774377, 'name': CreateVM_Task, 'duration_secs': 0.39553} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.910561] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1645.911271] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1645.911470] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1645.911811] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1645.912084] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f060de4d-3490-41f5-9742-50dcebfee0d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.916860] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1645.916860] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52df7c82-71bd-f435-cec7-2b4953f7602a" [ 1645.916860] env[63371]: _type = "Task" [ 1645.916860] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.924645] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df7c82-71bd-f435-cec7-2b4953f7602a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.975096] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774376, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.042309] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e5e6ee2a-1895-46d3-91fd-cbe973143d64 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "3027832f-12cd-4255-b699-bcbb254a6c5a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.402s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.170742] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774374, 'name': CloneVM_Task, 'duration_secs': 1.54865} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.171998] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Created linked-clone VM from snapshot [ 1646.173107] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d70404-0d5c-474b-8416-9fccdeea23c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.194360] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Uploading image 8f12a426-cc2b-451c-9e39-167235455267 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1646.224471] env[63371]: DEBUG nova.network.neutron [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Successfully updated port: e6dc9116-b799-4666-8abb-a5e2ab4749e5 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1646.239986] env[63371]: DEBUG oslo_vmware.rw_handles [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1646.239986] env[63371]: value = "vm-368410" [ 1646.239986] env[63371]: _type = "VirtualMachine" [ 1646.239986] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1646.240217] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-160205fe-e1b0-46da-89cf-80c50fcd4de0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.251636] env[63371]: DEBUG oslo_vmware.rw_handles [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lease: (returnval){ [ 1646.251636] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520bd32e-a896-3fe1-f372-00c835d34a8a" [ 1646.251636] env[63371]: _type = "HttpNfcLease" [ 1646.251636] env[63371]: } obtained for exporting VM: (result){ [ 1646.251636] env[63371]: value = "vm-368410" [ 1646.251636] env[63371]: _type = "VirtualMachine" [ 1646.251636] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1646.251636] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the lease: (returnval){ [ 1646.251636] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520bd32e-a896-3fe1-f372-00c835d34a8a" [ 1646.251636] env[63371]: _type = "HttpNfcLease" [ 1646.251636] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1646.263660] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1646.263660] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520bd32e-a896-3fe1-f372-00c835d34a8a" [ 1646.263660] env[63371]: _type = "HttpNfcLease" [ 1646.263660] env[63371]: } is initializing. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1646.304117] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774378, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.369365] env[63371]: DEBUG nova.network.neutron [req-f9e9b341-64a9-4f51-86f9-97d35a7b5800 req-49414146-de99-4af7-a542-44ce0d76e124 service nova] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Updated VIF entry in instance network info cache for port 165046e2-6d39-4a5c-9e2f-57619d3c8309. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1646.369824] env[63371]: DEBUG nova.network.neutron [req-f9e9b341-64a9-4f51-86f9-97d35a7b5800 req-49414146-de99-4af7-a542-44ce0d76e124 service nova] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Updating instance_info_cache with network_info: [{"id": "165046e2-6d39-4a5c-9e2f-57619d3c8309", "address": "fa:16:3e:0a:5b:2b", "network": {"id": "9c25e5e9-468d-4d4c-93e0-c9815eff1c2e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-814005109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e693d73d70140c2ba065de2b60838c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap165046e2-6d", "ovs_interfaceid": "165046e2-6d39-4a5c-9e2f-57619d3c8309", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1646.423977] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b7fc7c-6d6c-430d-88e7-46abeb30b323 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.432489] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df7c82-71bd-f435-cec7-2b4953f7602a, 'name': SearchDatastore_Task, 'duration_secs': 0.009724} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.434378] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1646.434614] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1646.434841] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.434987] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.435184] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1646.435481] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74c85037-7e64-4652-9dca-80f8aecbd811 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.438178] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60df89a0-b855-46af-91fc-b29836c26bf6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.474472] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87af8508-cdaa-4f0c-9141-7860c0384441 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.476955] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1646.477112] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1646.478006] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55da985a-6025-431e-a631-043b00afd6c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.489829] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e671af-e812-49f5-a699-687eb184f46b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.493858] env[63371]: DEBUG oslo_vmware.api [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774376, 'name': PowerOnVM_Task, 'duration_secs': 1.041365} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.494230] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1646.494230] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5268d572-0e59-b0dc-4358-6485271dc5b1" [ 1646.494230] env[63371]: _type = "Task" [ 1646.494230] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.494950] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1646.495310] env[63371]: INFO nova.compute.manager [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Took 8.46 seconds to spawn the instance on the hypervisor. 
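[annotation] The CopyVirtualDisk_Task / PowerOnVM_Task / SearchDatastore_Task entries around 1645.79-1646.49 above all follow the same oslo.vmware task-polling sequence: "Waiting for the task", repeated "progress is N%" lines from _poll_task, then "completed successfully" with a duration_secs value. The snippet below is a minimal, self-contained sketch of that poll-until-done pattern only; get_task_info and TaskFailed are illustrative stand-ins supplied by the caller, not oslo.vmware's actual API.

import time

class TaskFailed(Exception):
    """Raised when the polled task ends in an error state (illustrative)."""

def wait_for_task(get_task_info, poll_interval=0.5, log=print):
    """Poll a task until it succeeds or fails, mirroring the log's
    'Waiting for the task' / 'progress is N%' / 'completed successfully'
    sequence. get_task_info is a caller-supplied callable returning an
    object with .state ('running' | 'success' | 'error'), .progress and .error.
    """
    start = time.monotonic()
    while True:
        info = get_task_info()
        if info.state == 'success':
            log("Task completed successfully in %.3fs" % (time.monotonic() - start))
            return info
        if info.state == 'error':
            raise TaskFailed(info.error)
        log("Task progress is %s%%" % (info.progress or 0))
        time.sleep(poll_interval)

[end annotation]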
[ 1646.495561] env[63371]: DEBUG nova.compute.manager [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1646.496796] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a90480-3638-4485-8ff3-e82713285556 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.511187] env[63371]: DEBUG nova.compute.provider_tree [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1646.521212] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5268d572-0e59-b0dc-4358-6485271dc5b1, 'name': SearchDatastore_Task, 'duration_secs': 0.0121} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.524358] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f444d66-ebad-482d-9603-469d92f7f5b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.529801] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1646.529801] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd1477-e041-fd4d-6a68-d8a6e27ec262" [ 1646.529801] env[63371]: _type = "Task" [ 1646.529801] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.539380] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd1477-e041-fd4d-6a68-d8a6e27ec262, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.734676] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Acquiring lock "refresh_cache-36d5c00a-4762-4801-aff1-0a22e336730a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.734676] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Acquired lock "refresh_cache-36d5c00a-4762-4801-aff1-0a22e336730a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.734676] env[63371]: DEBUG nova.network.neutron [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1646.761375] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1646.761375] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520bd32e-a896-3fe1-f372-00c835d34a8a" [ 1646.761375] env[63371]: _type = "HttpNfcLease" [ 1646.761375] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1646.761674] env[63371]: DEBUG oslo_vmware.rw_handles [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1646.761674] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520bd32e-a896-3fe1-f372-00c835d34a8a" [ 1646.761674] env[63371]: _type = "HttpNfcLease" [ 1646.761674] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1646.762464] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-438659c4-94a6-4b08-b1e2-0ab0008a8729 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.770260] env[63371]: DEBUG oslo_vmware.rw_handles [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522c5695-3098-9d18-b482-9bf6a7450392/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1646.770533] env[63371]: DEBUG oslo_vmware.rw_handles [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522c5695-3098-9d18-b482-9bf6a7450392/disk-0.vmdk for reading. 
{{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1646.837927] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774378, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.6286} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.838245] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3c6294ae-9a16-4f1e-abd4-1aec224625ac/3c6294ae-9a16-4f1e-abd4-1aec224625ac.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1646.838425] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1646.838679] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b47c7db2-7070-4f9c-a8c4-30ef991cf9a2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.846112] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1646.846112] env[63371]: value = "task-1774380" [ 1646.846112] env[63371]: _type = "Task" [ 1646.846112] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.853986] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774380, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.866074] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-6d00b541-862f-4d2d-91c8-975526e9c25a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.875678] env[63371]: DEBUG oslo_concurrency.lockutils [req-f9e9b341-64a9-4f51-86f9-97d35a7b5800 req-49414146-de99-4af7-a542-44ce0d76e124 service nova] Releasing lock "refresh_cache-3065fc71-f127-43b7-83b7-70140f29965b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.014638] env[63371]: DEBUG nova.scheduler.client.report [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1647.036079] env[63371]: INFO nova.compute.manager [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Took 43.40 seconds to build instance. [ 1647.046609] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd1477-e041-fd4d-6a68-d8a6e27ec262, 'name': SearchDatastore_Task, 'duration_secs': 0.009126} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.046883] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1647.047182] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3065fc71-f127-43b7-83b7-70140f29965b/3065fc71-f127-43b7-83b7-70140f29965b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1647.047483] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-482ee281-e79e-45ad-8d53-e1735ba362cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.058026] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1647.058026] env[63371]: value = "task-1774381" [ 1647.058026] env[63371]: _type = "Task" [ 1647.058026] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.068539] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774381, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.272480] env[63371]: DEBUG nova.network.neutron [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1647.356634] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774380, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.222131} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.358107] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1647.358107] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35e0da9d-7129-4ec1-9f6b-f3d0dec68ff7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.385885] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 3c6294ae-9a16-4f1e-abd4-1aec224625ac/3c6294ae-9a16-4f1e-abd4-1aec224625ac.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1647.389329] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c55c435e-99e3-419a-9fce-913635743078 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.421877] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "61a44b0c-86fc-4f1c-a102-61eaff509d20" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.422394] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "61a44b0c-86fc-4f1c-a102-61eaff509d20" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.425661] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1647.425661] env[63371]: value = "task-1774382" [ 1647.425661] env[63371]: _type = "Task" [ 1647.425661] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.436914] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774382, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.522021] env[63371]: DEBUG oslo_concurrency.lockutils [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.011s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.526339] env[63371]: DEBUG nova.network.neutron [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Updating instance_info_cache with network_info: [{"id": "e6dc9116-b799-4666-8abb-a5e2ab4749e5", "address": "fa:16:3e:7a:98:57", "network": {"id": "242b7d96-2f57-46ea-a28b-7631355f3c95", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1756848462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5c0a56fc6994e8b94d4bc92ab09726e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6dc9116-b7", "ovs_interfaceid": "e6dc9116-b799-4666-8abb-a5e2ab4749e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.527603] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.803s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.527833] env[63371]: DEBUG nova.objects.instance [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lazy-loading 'resources' on Instance uuid 0cd2018f-7a54-4458-b5fd-353ab75ffbfd {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1647.538092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6cf6b9dc-c69b-4c4e-b45a-337bb88deb76 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.911s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.550237] env[63371]: INFO nova.scheduler.client.report [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 
tempest-MigrationsAdminTest-1001891710-project-member] Deleted allocations for instance f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed [ 1647.569245] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774381, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.590982] env[63371]: DEBUG nova.compute.manager [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Received event network-vif-plugged-e6dc9116-b799-4666-8abb-a5e2ab4749e5 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1647.591632] env[63371]: DEBUG oslo_concurrency.lockutils [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] Acquiring lock "36d5c00a-4762-4801-aff1-0a22e336730a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.591782] env[63371]: DEBUG oslo_concurrency.lockutils [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] Lock "36d5c00a-4762-4801-aff1-0a22e336730a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.592142] env[63371]: DEBUG oslo_concurrency.lockutils [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] Lock "36d5c00a-4762-4801-aff1-0a22e336730a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.592339] env[63371]: DEBUG nova.compute.manager [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] No waiting events found dispatching network-vif-plugged-e6dc9116-b799-4666-8abb-a5e2ab4749e5 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1647.592672] env[63371]: WARNING nova.compute.manager [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Received unexpected event network-vif-plugged-e6dc9116-b799-4666-8abb-a5e2ab4749e5 for instance with vm_state building and task_state spawning. [ 1647.592909] env[63371]: DEBUG nova.compute.manager [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Received event network-changed-e6dc9116-b799-4666-8abb-a5e2ab4749e5 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1647.593197] env[63371]: DEBUG nova.compute.manager [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Refreshing instance network info cache due to event network-changed-e6dc9116-b799-4666-8abb-a5e2ab4749e5. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1647.593443] env[63371]: DEBUG oslo_concurrency.lockutils [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] Acquiring lock "refresh_cache-36d5c00a-4762-4801-aff1-0a22e336730a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.927181] env[63371]: DEBUG nova.compute.manager [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1647.939209] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774382, 'name': ReconfigVM_Task, 'duration_secs': 0.497253} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.939509] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 3c6294ae-9a16-4f1e-abd4-1aec224625ac/3c6294ae-9a16-4f1e-abd4-1aec224625ac.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1647.940243] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2de7ac60-2e12-46fb-a34c-5c6d26407660 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.947109] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1647.947109] env[63371]: value = "task-1774383" [ 1647.947109] env[63371]: _type = "Task" [ 1647.947109] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.956020] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774383, 'name': Rename_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.033307] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Releasing lock "refresh_cache-36d5c00a-4762-4801-aff1-0a22e336730a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.033631] env[63371]: DEBUG nova.compute.manager [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Instance network_info: |[{"id": "e6dc9116-b799-4666-8abb-a5e2ab4749e5", "address": "fa:16:3e:7a:98:57", "network": {"id": "242b7d96-2f57-46ea-a28b-7631355f3c95", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1756848462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5c0a56fc6994e8b94d4bc92ab09726e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6dc9116-b7", "ovs_interfaceid": "e6dc9116-b799-4666-8abb-a5e2ab4749e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1648.036724] env[63371]: DEBUG oslo_concurrency.lockutils [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] Acquired lock "refresh_cache-36d5c00a-4762-4801-aff1-0a22e336730a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1648.036917] env[63371]: DEBUG nova.network.neutron [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Refreshing network info cache for port e6dc9116-b799-4666-8abb-a5e2ab4749e5 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1648.038454] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:98:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '411f389f-4e4f-4450-891e-38944cac6135', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6dc9116-b799-4666-8abb-a5e2ab4749e5', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1648.049389] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 
tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Creating folder: Project (f5c0a56fc6994e8b94d4bc92ab09726e). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1648.049389] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ffc7e62-db0b-49e2-b3e8-14bed3fd4178 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.063550] env[63371]: DEBUG oslo_concurrency.lockutils [None req-24fe98c9-ab12-4e9d-878e-771e611af2d0 tempest-MigrationsAdminTest-1001891710 tempest-MigrationsAdminTest-1001891710-project-member] Lock "f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.465s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1648.071216] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774381, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.74643} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.074333] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3065fc71-f127-43b7-83b7-70140f29965b/3065fc71-f127-43b7-83b7-70140f29965b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1648.074565] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1648.076557] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-63214ef5-7538-4c47-a108-c5246ed50fc6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.078522] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Created folder: Project (f5c0a56fc6994e8b94d4bc92ab09726e) in parent group-v368199. [ 1648.078700] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Creating folder: Instances. Parent ref: group-v368412. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1648.078926] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3822317-f5d7-405d-98a5-36f4bcd32004 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.087523] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1648.087523] env[63371]: value = "task-1774386" [ 1648.087523] env[63371]: _type = "Task" [ 1648.087523] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.095164] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Created folder: Instances in parent group-v368412. [ 1648.095430] env[63371]: DEBUG oslo.service.loopingcall [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1648.096192] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1648.096474] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c1863f5-3d90-4181-abf2-bcbc929c334a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.119128] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774386, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.125795] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1648.125795] env[63371]: value = "task-1774387" [ 1648.125795] env[63371]: _type = "Task" [ 1648.125795] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.133640] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774387, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.421139] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4062602e-df11-435a-bb28-11fcd19ed03e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.429016] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd0bdb7-4cfc-44fc-8858-846b17f8bf06 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.466903] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368b955b-8200-4ed8-818c-f8d964f86443 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.475139] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774383, 'name': Rename_Task, 'duration_secs': 0.162292} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.477623] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1648.478059] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-203ce2b8-8763-4c0a-a70a-bcf16eb182e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.480818] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb9239d2-0e1b-4e78-955c-b54fec9fa59b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.485684] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1648.498935] env[63371]: DEBUG nova.compute.provider_tree [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1648.501472] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1648.501472] env[63371]: value = "task-1774388" [ 1648.501472] env[63371]: _type = "Task" [ 1648.501472] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.510029] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774388, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.599140] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774386, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072814} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1648.599498] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1648.600399] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61dcc4c7-a171-472d-9363-d97cab4d828f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.623242] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 3065fc71-f127-43b7-83b7-70140f29965b/3065fc71-f127-43b7-83b7-70140f29965b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1648.623555] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98164ed6-5476-4b4c-98bc-b12c605ab607 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.646614] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774387, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.650262] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1648.650262] env[63371]: value = "task-1774389" [ 1648.650262] env[63371]: _type = "Task" [ 1648.650262] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1648.659573] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774389, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1648.796818] env[63371]: DEBUG nova.compute.manager [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Stashing vm_state: active {{(pid=63371) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1648.962595] env[63371]: DEBUG nova.network.neutron [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Updated VIF entry in instance network info cache for port e6dc9116-b799-4666-8abb-a5e2ab4749e5. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1648.962980] env[63371]: DEBUG nova.network.neutron [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Updating instance_info_cache with network_info: [{"id": "e6dc9116-b799-4666-8abb-a5e2ab4749e5", "address": "fa:16:3e:7a:98:57", "network": {"id": "242b7d96-2f57-46ea-a28b-7631355f3c95", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1756848462-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f5c0a56fc6994e8b94d4bc92ab09726e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6dc9116-b7", "ovs_interfaceid": "e6dc9116-b799-4666-8abb-a5e2ab4749e5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1649.003802] env[63371]: DEBUG nova.scheduler.client.report [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1649.020265] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774388, 'name': PowerOnVM_Task} progress is 66%. 
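
The inventory reported for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 fixes what the scheduler can still place on this host. A small worked example with the figures from the record above, using the usual Placement rule that usable capacity is roughly (total - reserved) * allocation_ratio, with max_unit capping any single allocation:

    # Figures copied from the inventory data logged above.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 163},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:g} allocatable, at most {inv['max_unit']} per allocation")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400

Because the computed inventory matches what Placement already has, the report client logs "Inventory has not changed" and skips the update.
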
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.150090] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774387, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.161999] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774389, 'name': ReconfigVM_Task, 'duration_secs': 0.487738} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.162775] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 3065fc71-f127-43b7-83b7-70140f29965b/3065fc71-f127-43b7-83b7-70140f29965b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1649.164567] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0fa8d064-9055-4ae9-a0b2-4951c5311925 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.176120] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1649.176120] env[63371]: value = "task-1774390" [ 1649.176120] env[63371]: _type = "Task" [ 1649.176120] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.194244] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774390, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.318642] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.466238] env[63371]: DEBUG oslo_concurrency.lockutils [req-51f14264-ea9e-4fbc-8ddb-3786b23c6831 req-76ac70b2-1d1d-4488-8a98-934f59031352 service nova] Releasing lock "refresh_cache-36d5c00a-4762-4801-aff1-0a22e336730a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1649.512209] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.985s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1649.514426] env[63371]: DEBUG oslo_concurrency.lockutils [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.780s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1649.514622] env[63371]: DEBUG nova.objects.instance [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lazy-loading 'resources' on Instance uuid e912c210-3ae1-47ce-b9cd-afebf6195606 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1649.521267] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774388, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.534128] env[63371]: INFO nova.scheduler.client.report [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Deleted allocations for instance 0cd2018f-7a54-4458-b5fd-353ab75ffbfd [ 1649.648928] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774387, 'name': CreateVM_Task, 'duration_secs': 1.132805} completed successfully. 
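
The lockutils records in this stretch all follow the same shape: "Acquiring lock", then "acquired :: waited N s", then "released :: held N s". The 8.780s wait on "compute_resources" is genuine contention, since the resource tracker serialises every claim and usage update on the host behind that single lock. A simplified, stdlib-only illustration of the same bookkeeping (not the oslo.concurrency implementation):

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}

    @contextmanager
    def timed_lock(name, caller):
        """Reproduce the Acquiring / acquired-waited / released-held accounting."""
        lock = _locks.setdefault(name, threading.Lock())
        print(f'Acquiring lock "{name}" by "{caller}"')
        t0 = time.monotonic()
        lock.acquire()
        print(f'Lock "{name}" acquired by "{caller}" :: waited {time.monotonic() - t0:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            print(f'Lock "{name}" "released" by "{caller}" :: held {time.monotonic() - t1:.3f}s')
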
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.649401] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1649.649924] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.650126] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.650511] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1649.650822] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1c897b2-4f4d-47c1-bba7-152410101269 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.656282] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Waiting for the task: (returnval){ [ 1649.656282] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a0920d-9426-8d2f-aa4e-99715d52466e" [ 1649.656282] env[63371]: _type = "Task" [ 1649.656282] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.665102] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a0920d-9426-8d2f-aa4e-99715d52466e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.686580] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774390, 'name': Rename_Task, 'duration_secs': 0.219312} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.687176] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1649.687501] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd9256c1-2ce2-4084-9a01-73a763e2c054 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.693515] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1649.693515] env[63371]: value = "task-1774391" [ 1649.693515] env[63371]: _type = "Task" [ 1649.693515] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.702403] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774391, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.020029] env[63371]: DEBUG oslo_vmware.api [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774388, 'name': PowerOnVM_Task, 'duration_secs': 1.18475} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.021042] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1650.021042] env[63371]: INFO nova.compute.manager [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Took 9.36 seconds to spawn the instance on the hypervisor. 
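
Read together, the records for instances 3c6294ae and 3065fc71 trace the same spawn order on the vCenter side: prepare the root disk, attach it with a reconfigure, rename the VM to its final name, power it on, then re-read the power state. A compact sketch of that ordering; every callable is a stand-in for the corresponding vSphere task named in the log:

    def spawn(vm, disk_ops, vm_ops):
        """Stand-ins for the vSphere tasks, in the order the log shows them."""
        disk_ops.extend_root_disk(vm)        # ExtendVirtualDisk_Task
        vm_ops.attach_root_disk(vm)          # ReconfigVM_Task ("attach disk ... with type sparse")
        vm_ops.rename(vm)                    # Rename_Task
        vm_ops.power_on(vm)                  # PowerOnVM_Task
        return vm_ops.get_power_state(vm)    # "Checking state" via RetrievePropertiesEx

The "Took 9.36 seconds to spawn the instance on the hypervisor" figure covers roughly this span; the larger "Took ... seconds to build instance" numbers later also include the non-hypervisor parts of the build, such as network allocation and image handling.
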
[ 1650.021162] env[63371]: DEBUG nova.compute.manager [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1650.021997] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8912f4e-c6ef-473e-8656-ba989415ae93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.042383] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3cc8d349-929d-463c-b152-84ab9baeeacb tempest-VolumesAdminNegativeTest-1044213934 tempest-VolumesAdminNegativeTest-1044213934-project-member] Lock "0cd2018f-7a54-4458-b5fd-353ab75ffbfd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.689s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.167330] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a0920d-9426-8d2f-aa4e-99715d52466e, 'name': SearchDatastore_Task, 'duration_secs': 0.014126} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.167653] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.167888] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1650.168142] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1650.168287] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1650.168461] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Creating 
directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1650.168722] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4370de13-c993-4648-b6ef-6a4a0288556f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.179370] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1650.180160] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1650.180388] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0ffc4bd-a142-4d2a-903a-f88fe92e9d1c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.185740] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Waiting for the task: (returnval){ [ 1650.185740] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b0bde8-7a2f-1eb0-70e6-8499ae11b64b" [ 1650.185740] env[63371]: _type = "Task" [ 1650.185740] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.195661] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b0bde8-7a2f-1eb0-70e6-8499ae11b64b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.205364] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774391, 'name': PowerOnVM_Task} progress is 89%. 
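
Before copying the root disk for 36d5c00a-4762-4801-aff1-0a22e336730a, the driver takes the lock on the cached vmdk, makes sure the devstack-image-cache_base folder exists (the MakeDirectory call is harmless when it already does), and searches the datastore to confirm image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 is cached. A rough local-filesystem analogue of that check; the paths and helper name are invented:

    from pathlib import Path

    def cached_image_path(datastore_root, image_id):
        """Return the cached base-image vmdk if present, else None."""
        cache_dir = Path(datastore_root) / "devstack-image-cache_base"
        cache_dir.mkdir(parents=True, exist_ok=True)     # FileManager.MakeDirectory
        vmdk = cache_dir / image_id / f"{image_id}.vmdk"
        return vmdk if vmdk.exists() else None            # HostDatastoreBrowser.SearchDatastore_Task

Had the search come back empty, the image would first be fetched into the cache; here it is already present, so the copy that follows goes straight from the cache to the instance folder.
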
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.363116] env[63371]: DEBUG oslo_concurrency.lockutils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquiring lock "33952466-3df7-4485-8e7a-ab3d6ec3f22c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.363116] env[63371]: DEBUG oslo_concurrency.lockutils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Lock "33952466-3df7-4485-8e7a-ab3d6ec3f22c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.371357] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a13c68b-c5f2-4550-8e77-22928a0eca02 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.380048] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0ed19d-a974-4322-a3b7-0d4498e6d1cf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.415205] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36852145-9cdb-498b-8aac-0bbf4f0b9de3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.424236] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d712bc7-8c59-4aba-876b-98c2943a5aeb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.438912] env[63371]: DEBUG nova.compute.provider_tree [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1650.543235] env[63371]: INFO nova.compute.manager [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Took 31.57 seconds to build instance. [ 1650.696999] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b0bde8-7a2f-1eb0-70e6-8499ae11b64b, 'name': SearchDatastore_Task, 'duration_secs': 0.014489} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.701231] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e70b52ba-f94e-45bf-8ccb-578898f1bbb6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.707841] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Waiting for the task: (returnval){ [ 1650.707841] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c14162-eaec-9a20-6790-ff68509b4739" [ 1650.707841] env[63371]: _type = "Task" [ 1650.707841] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.710799] env[63371]: DEBUG oslo_vmware.api [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774391, 'name': PowerOnVM_Task, 'duration_secs': 0.657495} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.715180] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1650.715430] env[63371]: INFO nova.compute.manager [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Took 7.47 seconds to spawn the instance on the hypervisor. [ 1650.715583] env[63371]: DEBUG nova.compute.manager [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1650.716343] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e82f06-1d4f-4cfc-9fbc-e3ac05ce68ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.724379] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c14162-eaec-9a20-6790-ff68509b4739, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.864614] env[63371]: DEBUG nova.compute.manager [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1650.942767] env[63371]: DEBUG nova.scheduler.client.report [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1651.006693] env[63371]: DEBUG nova.compute.manager [req-d9244157-25e3-44fb-972a-2b30c26f54bd req-b8d5aa5c-e086-4252-9946-c7014f8e4d7a service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Received event network-changed-8fb2ce38-fb30-464a-9fa3-42bd21ffe84c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1651.006874] env[63371]: DEBUG nova.compute.manager [req-d9244157-25e3-44fb-972a-2b30c26f54bd req-b8d5aa5c-e086-4252-9946-c7014f8e4d7a service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Refreshing instance network info cache due to event network-changed-8fb2ce38-fb30-464a-9fa3-42bd21ffe84c. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1651.007104] env[63371]: DEBUG oslo_concurrency.lockutils [req-d9244157-25e3-44fb-972a-2b30c26f54bd req-b8d5aa5c-e086-4252-9946-c7014f8e4d7a service nova] Acquiring lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1651.007247] env[63371]: DEBUG oslo_concurrency.lockutils [req-d9244157-25e3-44fb-972a-2b30c26f54bd req-b8d5aa5c-e086-4252-9946-c7014f8e4d7a service nova] Acquired lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1651.007406] env[63371]: DEBUG nova.network.neutron [req-d9244157-25e3-44fb-972a-2b30c26f54bd req-b8d5aa5c-e086-4252-9946-c7014f8e4d7a service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Refreshing network info cache for port 8fb2ce38-fb30-464a-9fa3-42bd21ffe84c {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1651.045600] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d8bfff29-84fa-4935-9f45-d592d6091653 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.079s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.221361] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c14162-eaec-9a20-6790-ff68509b4739, 'name': SearchDatastore_Task, 'duration_secs': 0.018206} completed successfully. 
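
The network-changed-8fb2ce38-fb30-464a-9fa3-42bd21ffe84c event is Neutron notifying Nova, through the external-event API, that the port's details changed; the handler takes the instance's refresh_cache lock, re-reads the port and rewrites only that VIF's entry in the cached network_info, which is what the "Updated VIF entry ..." record that follows reports. A dict-based sketch of that refresh, with every name invented for illustration:

    def handle_network_changed(instance_cache, lock, instance_uuid, port_id, fetch_port):
        """Refresh a single VIF entry in a per-instance network_info cache."""
        with lock:                                    # "Acquired lock refresh_cache-<uuid>"
            nw_info = instance_cache.setdefault(instance_uuid, [])
            fresh = fetch_port(port_id)               # ask Neutron for the current port data
            for i, vif in enumerate(nw_info):
                if vif["id"] == port_id:
                    nw_info[i] = fresh                # "Updated VIF entry ... for port <id>"
                    break
            else:
                nw_info.append(fresh)
            return nw_info
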
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.221755] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1651.222211] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 36d5c00a-4762-4801-aff1-0a22e336730a/36d5c00a-4762-4801-aff1-0a22e336730a.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1651.222514] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0de222d6-c65a-4727-b49f-8ef6aad385b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.244163] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Waiting for the task: (returnval){ [ 1651.244163] env[63371]: value = "task-1774392" [ 1651.244163] env[63371]: _type = "Task" [ 1651.244163] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.244755] env[63371]: INFO nova.compute.manager [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Took 29.73 seconds to build instance. [ 1651.261025] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774392, 'name': CopyVirtualDisk_Task} progress is 0%. 
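
With the cache hit confirmed, the root disk for 36d5c00a-4762-4801-aff1-0a22e336730a is produced by copying the cached vmdk into the instance's own folder on the same datastore and then extending the copy to the flavor's root size before it is attached. A sketch of that path handling; the helpers are placeholders for the CopyVirtualDisk and ExtendVirtualDisk tasks, and the size conversion assumes the "Extending root virtual disk to 1048576" figure seen later is the root size expressed in KB (i.e. a 1 GB root disk):

    def instance_disk_path(datastore, instance_uuid):
        # "[datastore1] <uuid>/<uuid>.vmdk", matching the copy destination in the log
        return f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    def build_root_disk(copy_disk, extend_disk, datastore, cached_vmdk, instance_uuid, root_gb):
        dest = instance_disk_path(datastore, instance_uuid)
        copy_disk(cached_vmdk, dest)                  # CopyVirtualDisk_Task
        extend_disk(dest, root_gb * 1024 * 1024)      # ExtendVirtualDisk_Task, size in KB
        return dest                                   # ReconfigVM_Task then attaches this path
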
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.387155] env[63371]: DEBUG oslo_concurrency.lockutils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.451221] env[63371]: DEBUG oslo_concurrency.lockutils [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.937s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.453651] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.289s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.455373] env[63371]: INFO nova.compute.claims [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1651.471502] env[63371]: INFO nova.scheduler.client.report [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleted allocations for instance e912c210-3ae1-47ce-b9cd-afebf6195606 [ 1651.569832] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.570185] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.570385] env[63371]: INFO nova.compute.manager [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Rebooting instance [ 1651.751261] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20737e41-ff54-4f02-afe0-0508ecedd873 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "3065fc71-f127-43b7-83b7-70140f29965b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.241s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1651.769526] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774392, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.906407] env[63371]: DEBUG nova.network.neutron [req-d9244157-25e3-44fb-972a-2b30c26f54bd req-b8d5aa5c-e086-4252-9946-c7014f8e4d7a service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Updated VIF entry in instance network info cache for port 8fb2ce38-fb30-464a-9fa3-42bd21ffe84c. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1651.906624] env[63371]: DEBUG nova.network.neutron [req-d9244157-25e3-44fb-972a-2b30c26f54bd req-b8d5aa5c-e086-4252-9946-c7014f8e4d7a service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Updating instance_info_cache with network_info: [{"id": "8fb2ce38-fb30-464a-9fa3-42bd21ffe84c", "address": "fa:16:3e:61:26:f7", "network": {"id": "1dc1e53b-b865-4642-b667-e771524c6438", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-562533890-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fd1634f21c45efa8606cf6c339a790", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fb2ce38-fb", "ovs_interfaceid": "8fb2ce38-fb30-464a-9fa3-42bd21ffe84c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1651.980292] env[63371]: DEBUG oslo_concurrency.lockutils [None req-405f0d65-3581-470a-987d-55ca461c9f05 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e912c210-3ae1-47ce-b9cd-afebf6195606" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.277s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.097905] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1652.253052] env[63371]: DEBUG oslo_concurrency.lockutils [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.253346] env[63371]: DEBUG oslo_concurrency.lockutils [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.253453] env[63371]: DEBUG oslo_concurrency.lockutils [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.253633] env[63371]: DEBUG oslo_concurrency.lockutils [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.253802] env[63371]: DEBUG oslo_concurrency.lockutils [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.259840] env[63371]: INFO nova.compute.manager [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Terminating instance [ 1652.262877] env[63371]: DEBUG nova.compute.manager [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1652.262877] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1652.263023] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69390de-c1a9-4fbb-a9d1-5dfae329ac5d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.273932] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774392, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.859056} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.274244] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 36d5c00a-4762-4801-aff1-0a22e336730a/36d5c00a-4762-4801-aff1-0a22e336730a.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1652.274405] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1652.274664] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e079610b-81e2-4931-9622-5e9394657082 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.280742] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1652.285020] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24cf1010-7806-48d2-a537-09b8bbf7d09e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.285630] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Waiting for the task: (returnval){ [ 1652.285630] env[63371]: value = "task-1774393" [ 1652.285630] env[63371]: _type = "Task" [ 1652.285630] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.291549] env[63371]: DEBUG oslo_vmware.api [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1652.291549] env[63371]: value = "task-1774394" [ 1652.291549] env[63371]: _type = "Task" [ 1652.291549] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.300714] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774393, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.309242] env[63371]: DEBUG oslo_vmware.api [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774394, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.375100] env[63371]: DEBUG oslo_concurrency.lockutils [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "3065fc71-f127-43b7-83b7-70140f29965b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.375467] env[63371]: DEBUG oslo_concurrency.lockutils [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "3065fc71-f127-43b7-83b7-70140f29965b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.375728] env[63371]: DEBUG oslo_concurrency.lockutils [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "3065fc71-f127-43b7-83b7-70140f29965b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.375949] env[63371]: DEBUG oslo_concurrency.lockutils [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "3065fc71-f127-43b7-83b7-70140f29965b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.376151] env[63371]: DEBUG oslo_concurrency.lockutils [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "3065fc71-f127-43b7-83b7-70140f29965b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.378717] env[63371]: INFO nova.compute.manager [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Terminating instance [ 1652.380777] env[63371]: DEBUG nova.compute.manager [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1652.382321] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1652.382321] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-199bf1e9-2ccd-4dc0-8911-34d23cab7058 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.390249] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1652.390908] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f44f752b-df08-4bb9-8322-e243dcf41465 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.398473] env[63371]: DEBUG oslo_vmware.api [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1652.398473] env[63371]: value = "task-1774395" [ 1652.398473] env[63371]: _type = "Task" [ 1652.398473] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.406803] env[63371]: DEBUG oslo_vmware.api [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774395, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.411449] env[63371]: DEBUG oslo_concurrency.lockutils [req-d9244157-25e3-44fb-972a-2b30c26f54bd req-b8d5aa5c-e086-4252-9946-c7014f8e4d7a service nova] Releasing lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1652.411862] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquired lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1652.412064] env[63371]: DEBUG nova.network.neutron [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1652.799427] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774393, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067523} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.800136] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1652.800995] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480a9888-428b-4ea5-952b-e25810c5065d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.806535] env[63371]: DEBUG oslo_vmware.api [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774394, 'name': PowerOffVM_Task, 'duration_secs': 0.297511} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.809407] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1652.809669] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1652.810125] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bb86393-ced2-479b-813f-74647da26555 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.829832] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 36d5c00a-4762-4801-aff1-0a22e336730a/36d5c00a-4762-4801-aff1-0a22e336730a.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1652.833141] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b7d958a-888d-4981-af18-4f6ffbf24cbf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.852829] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Waiting for the task: (returnval){ [ 1652.852829] env[63371]: value = "task-1774397" [ 1652.852829] env[63371]: _type = "Task" [ 1652.852829] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.859295] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b63189-5328-4b11-bf17-2fd48cc041a2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.864399] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774397, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.869153] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa834f19-21d0-42e4-81ab-7418ea66860a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.911959] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5dfbec-5607-40cc-ba56-11e0357055f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.924715] env[63371]: DEBUG oslo_vmware.api [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774395, 'name': PowerOffVM_Task, 'duration_secs': 0.217643} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.925109] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1652.925274] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1652.926586] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03ad7d7-3279-4778-86ea-838d739ea77b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.932323] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-73f01321-82b9-4fa5-8e1d-8f7f298aee1d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.933469] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1652.933674] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1652.933846] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleting the datastore file [datastore1] e4608e3c-7083-42fa-b88c-8ee007ef7f60 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1652.934531] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-1c2c4a37-2b4e-48b4-a1de-44f7006691ab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.945510] env[63371]: DEBUG nova.compute.provider_tree [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1652.951716] env[63371]: DEBUG oslo_vmware.api [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for the task: (returnval){ [ 1652.951716] env[63371]: value = "task-1774399" [ 1652.951716] env[63371]: _type = "Task" [ 1652.951716] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.959628] env[63371]: DEBUG oslo_vmware.api [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774399, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.028057] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1653.028328] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1653.028681] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Deleting the datastore file [datastore1] 3065fc71-f127-43b7-83b7-70140f29965b {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1653.028968] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db54a108-8185-4d74-a4db-a3085340d4cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.036681] env[63371]: DEBUG oslo_vmware.api [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1653.036681] env[63371]: value = "task-1774400" [ 1653.036681] env[63371]: _type = "Task" [ 1653.036681] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.048623] env[63371]: DEBUG oslo_vmware.api [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774400, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.364452] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774397, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.377452] env[63371]: DEBUG nova.network.neutron [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Updating instance_info_cache with network_info: [{"id": "8fb2ce38-fb30-464a-9fa3-42bd21ffe84c", "address": "fa:16:3e:61:26:f7", "network": {"id": "1dc1e53b-b865-4642-b667-e771524c6438", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-562533890-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fd1634f21c45efa8606cf6c339a790", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fb2ce38-fb", "ovs_interfaceid": "8fb2ce38-fb30-464a-9fa3-42bd21ffe84c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1653.450373] env[63371]: DEBUG nova.scheduler.client.report [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1653.468373] env[63371]: DEBUG oslo_vmware.api [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Task: {'id': task-1774399, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287571} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.468738] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1653.469028] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1653.469312] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1653.473022] env[63371]: INFO nova.compute.manager [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Took 1.21 seconds to destroy the instance on the hypervisor. [ 1653.473022] env[63371]: DEBUG oslo.service.loopingcall [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1653.473022] env[63371]: DEBUG nova.compute.manager [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1653.473022] env[63371]: DEBUG nova.network.neutron [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1653.549314] env[63371]: DEBUG oslo_vmware.api [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774400, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.283002} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.549314] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1653.549314] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1653.549505] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1653.550021] env[63371]: INFO nova.compute.manager [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1653.550370] env[63371]: DEBUG oslo.service.loopingcall [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1653.550636] env[63371]: DEBUG nova.compute.manager [-] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1653.550788] env[63371]: DEBUG nova.network.neutron [-] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1653.868794] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774397, 'name': ReconfigVM_Task, 'duration_secs': 0.547834} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.869230] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 36d5c00a-4762-4801-aff1-0a22e336730a/36d5c00a-4762-4801-aff1-0a22e336730a.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1653.870143] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44875ad9-8a06-463c-ae89-63c57e854512 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.877508] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Waiting for the task: (returnval){ [ 1653.877508] env[63371]: value = "task-1774401" [ 1653.877508] env[63371]: _type = "Task" [ 1653.877508] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.881630] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Releasing lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1653.885346] env[63371]: DEBUG nova.compute.manager [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1653.886279] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb010a9-0aa2-460a-9fa8-17c51d155a14 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.895604] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774401, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.963055] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.963641] env[63371]: DEBUG nova.compute.manager [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1653.967168] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.481s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.969669] env[63371]: INFO nova.compute.claims [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1654.390508] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774401, 'name': Rename_Task, 'duration_secs': 0.178684} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.390878] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1654.391601] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-696a763f-fe25-48af-90a9-a61acb3a2a48 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.401414] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Waiting for the task: (returnval){ [ 1654.401414] env[63371]: value = "task-1774402" [ 1654.401414] env[63371]: _type = "Task" [ 1654.401414] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.417466] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774402, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.442174] env[63371]: DEBUG nova.compute.manager [req-be6eaa2f-adb0-4613-8224-0a95a61a3ffd req-97540b77-3845-4510-a9cd-8194770a3612 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Received event network-vif-deleted-e2249de3-2c03-4371-aab4-6173dd2b5d56 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1654.443315] env[63371]: INFO nova.compute.manager [req-be6eaa2f-adb0-4613-8224-0a95a61a3ffd req-97540b77-3845-4510-a9cd-8194770a3612 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Neutron deleted interface e2249de3-2c03-4371-aab4-6173dd2b5d56; detaching it from the instance and deleting it from the info cache [ 1654.443315] env[63371]: DEBUG nova.network.neutron [req-be6eaa2f-adb0-4613-8224-0a95a61a3ffd req-97540b77-3845-4510-a9cd-8194770a3612 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.475709] env[63371]: DEBUG nova.compute.utils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1654.483081] env[63371]: DEBUG nova.compute.manager [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1654.483081] env[63371]: DEBUG nova.network.neutron [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1654.575094] env[63371]: DEBUG nova.policy [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1b2f698ebd747d6a84ac3f3e05e97b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a5b81b233f640b186d9798ff57a4945', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1654.907687] env[63371]: DEBUG nova.network.neutron [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.922953] env[63371]: DEBUG nova.network.neutron [-] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.929946] env[63371]: DEBUG oslo_vmware.api [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 
tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774402, 'name': PowerOnVM_Task, 'duration_secs': 0.466939} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.930757] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c010376a-a377-45c1-b118-a378c9cc9c7c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.933851] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1654.934191] env[63371]: INFO nova.compute.manager [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Took 9.31 seconds to spawn the instance on the hypervisor. [ 1654.934518] env[63371]: DEBUG nova.compute.manager [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1654.936042] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c4323b-a534-4346-a66f-5c892ebc5fc4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.947861] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f975033e-3c4b-47ac-82b2-2a4c855b8230 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.953654] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Doing hard reboot of VM {{(pid=63371) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1654.957045] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-3f388bb6-a0b3-414d-a0f9-8712c2a860d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.966468] env[63371]: INFO nova.compute.manager [None req-ad0eade7-1479-4b8d-8bbe-24fd089543c3 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Get console output [ 1654.966468] env[63371]: WARNING nova.virt.vmwareapi.driver [None req-ad0eade7-1479-4b8d-8bbe-24fd089543c3 tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] The console log is missing. 
Check your VSPC configuration [ 1654.971010] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cddb5f0-c0cf-4d66-b0c6-0dc910eb2ca6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.988664] env[63371]: DEBUG nova.compute.manager [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1654.995114] env[63371]: DEBUG oslo_vmware.api [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1654.995114] env[63371]: value = "task-1774403" [ 1654.995114] env[63371]: _type = "Task" [ 1654.995114] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.007624] env[63371]: DEBUG oslo_vmware.api [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774403, 'name': ResetVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.023944] env[63371]: DEBUG nova.compute.manager [req-be6eaa2f-adb0-4613-8224-0a95a61a3ffd req-97540b77-3845-4510-a9cd-8194770a3612 service nova] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Detach interface failed, port_id=e2249de3-2c03-4371-aab4-6173dd2b5d56, reason: Instance e4608e3c-7083-42fa-b88c-8ee007ef7f60 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1655.052394] env[63371]: DEBUG nova.network.neutron [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Successfully created port: d78faddd-f19f-47ac-bee0-dd2e71ad5e42 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1655.393033] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a9231a-99d5-4f7c-96a0-424927be85e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.400955] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bded51-24f5-4730-9de8-66051f65e688 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.435560] env[63371]: INFO nova.compute.manager [-] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Took 1.97 seconds to deallocate network for instance. [ 1655.435921] env[63371]: INFO nova.compute.manager [-] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Took 1.89 seconds to deallocate network for instance. 
[ 1655.450637] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af4c7a3-ba46-488e-a7d2-833d48a1f882 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.463129] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78248983-e69f-468c-9990-29b9470d9b2c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.489888] env[63371]: INFO nova.compute.manager [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Took 31.41 seconds to build instance. [ 1655.491702] env[63371]: DEBUG nova.compute.provider_tree [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1655.508130] env[63371]: DEBUG oslo_vmware.api [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774403, 'name': ResetVM_Task, 'duration_secs': 0.124533} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.508415] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Did hard reboot of VM {{(pid=63371) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1655.508587] env[63371]: DEBUG nova.compute.manager [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1655.509433] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2afb9b-6604-45b7-890e-debbba7747be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.590632] env[63371]: DEBUG oslo_vmware.rw_handles [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522c5695-3098-9d18-b482-9bf6a7450392/disk-0.vmdk. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1655.591570] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9411b0d3-4611-4751-acae-85d7a3f1dc45 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.598829] env[63371]: DEBUG oslo_vmware.rw_handles [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522c5695-3098-9d18-b482-9bf6a7450392/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1655.599585] env[63371]: ERROR oslo_vmware.rw_handles [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522c5695-3098-9d18-b482-9bf6a7450392/disk-0.vmdk due to incomplete transfer. [ 1655.599585] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8477736b-391f-4dd2-9313-d79ed99c18ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.606171] env[63371]: DEBUG oslo_vmware.rw_handles [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522c5695-3098-9d18-b482-9bf6a7450392/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1655.606442] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Uploaded image 8f12a426-cc2b-451c-9e39-167235455267 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1655.609188] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1655.610821] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e2093c4d-8108-4c96-aaac-d2cafb85b14d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.620451] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1655.620451] env[63371]: value = "task-1774404" [ 1655.620451] env[63371]: _type = "Task" [ 1655.620451] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.629409] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774404, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.959350] env[63371]: DEBUG oslo_concurrency.lockutils [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.960281] env[63371]: DEBUG oslo_concurrency.lockutils [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.992171] env[63371]: DEBUG oslo_concurrency.lockutils [None req-227fefc7-9a5e-47f4-9146-ab673a2c8dc6 tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Lock "36d5c00a-4762-4801-aff1-0a22e336730a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.916s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.998540] env[63371]: DEBUG nova.scheduler.client.report [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1656.002316] env[63371]: DEBUG nova.compute.manager [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1656.025799] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d4837c17-20fd-4ad1-91f0-3b031af243aa tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.456s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.032260] env[63371]: DEBUG nova.virt.hardware [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='fda3ec73367aa211bd0af969c1a5cf64',container_format='bare',created_at=2024-12-11T21:36:20Z,direct_url=,disk_format='vmdk',id=1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc,min_disk=1,min_ram=0,name='tempest-test-snap-2027418003',owner='2a5b81b233f640b186d9798ff57a4945',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-12-11T21:36:35Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1656.032260] env[63371]: DEBUG nova.virt.hardware [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1656.032260] env[63371]: DEBUG nova.virt.hardware [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1656.032260] env[63371]: DEBUG nova.virt.hardware [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1656.032260] env[63371]: DEBUG nova.virt.hardware [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1656.033102] env[63371]: DEBUG nova.virt.hardware [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1656.033369] env[63371]: DEBUG nova.virt.hardware [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1656.033535] env[63371]: DEBUG 
nova.virt.hardware [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1656.034725] env[63371]: DEBUG nova.virt.hardware [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1656.034725] env[63371]: DEBUG nova.virt.hardware [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1656.034725] env[63371]: DEBUG nova.virt.hardware [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1656.035143] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de163e6-6a18-429d-86b6-0da6dcbf7837 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.047424] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68bace44-64f4-4d6b-89db-2936287b53f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.116013] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquiring lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.116384] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.116583] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquiring lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1656.116766] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.116932] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.119182] env[63371]: INFO nova.compute.manager [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Terminating instance [ 1656.121121] env[63371]: DEBUG nova.compute.manager [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1656.121315] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1656.122586] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8dee3a9-96be-4bda-8124-d68045c70f8a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.135767] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774404, 'name': Destroy_Task, 'duration_secs': 0.385172} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.137822] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Destroyed the VM [ 1656.138084] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1656.138375] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1656.138587] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5bd4c657-5cf8-4ad1-825e-fcaf613a00c8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.143625] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4932993-edfd-4da4-a311-5a9354e7cc3c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.149992] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1656.149992] env[63371]: value = "task-1774405" [ 1656.149992] env[63371]: _type = "Task" [ 1656.149992] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.157264] env[63371]: DEBUG oslo_vmware.api [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1656.157264] env[63371]: value = "task-1774406" [ 1656.157264] env[63371]: _type = "Task" [ 1656.157264] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.165803] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774405, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.173739] env[63371]: DEBUG oslo_vmware.api [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1774406, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.391625] env[63371]: DEBUG nova.compute.manager [req-9e6d067c-550d-4366-806c-87bfd5ab0932 req-1c748648-8c72-4352-a725-15c0ac774033 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Received event network-changed-8fb2ce38-fb30-464a-9fa3-42bd21ffe84c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1656.391966] env[63371]: DEBUG nova.compute.manager [req-9e6d067c-550d-4366-806c-87bfd5ab0932 req-1c748648-8c72-4352-a725-15c0ac774033 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Refreshing instance network info cache due to event network-changed-8fb2ce38-fb30-464a-9fa3-42bd21ffe84c. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1656.392098] env[63371]: DEBUG oslo_concurrency.lockutils [req-9e6d067c-550d-4366-806c-87bfd5ab0932 req-1c748648-8c72-4352-a725-15c0ac774033 service nova] Acquiring lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1656.392245] env[63371]: DEBUG oslo_concurrency.lockutils [req-9e6d067c-550d-4366-806c-87bfd5ab0932 req-1c748648-8c72-4352-a725-15c0ac774033 service nova] Acquired lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1656.392770] env[63371]: DEBUG nova.network.neutron [req-9e6d067c-550d-4366-806c-87bfd5ab0932 req-1c748648-8c72-4352-a725-15c0ac774033 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Refreshing network info cache for port 8fb2ce38-fb30-464a-9fa3-42bd21ffe84c {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1656.511022] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.540s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.511022] env[63371]: DEBUG nova.compute.manager [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1656.512245] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 7.194s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.608533] env[63371]: DEBUG nova.compute.manager [req-05026f00-cc23-4309-855c-04360029ff95 req-02b74aa3-d881-40d1-b8f5-45b20a86025e service nova] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Received event network-vif-deleted-165046e2-6d39-4a5c-9e2f-57619d3c8309 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1656.664794] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774405, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.667710] env[63371]: DEBUG oslo_vmware.api [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1774406, 'name': PowerOffVM_Task, 'duration_secs': 0.456328} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.668113] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1656.668113] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1656.668721] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df95f8d1-14da-4883-98c6-9357bf44ab09 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.760530] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1656.761040] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1656.762024] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-90076925-7d22-4556-a735-1528c1899fea 
tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Deleting the datastore file [datastore1] e8bd5802-d2ff-4348-92d4-c23277f4eaeb {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1656.762420] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b02d18cf-2059-42ef-935e-437244bef249 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.770363] env[63371]: DEBUG oslo_vmware.api [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for the task: (returnval){ [ 1656.770363] env[63371]: value = "task-1774408" [ 1656.770363] env[63371]: _type = "Task" [ 1656.770363] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.779221] env[63371]: DEBUG oslo_vmware.api [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1774408, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.786352] env[63371]: DEBUG nova.network.neutron [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Successfully updated port: d78faddd-f19f-47ac-bee0-dd2e71ad5e42 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1657.020711] env[63371]: DEBUG nova.compute.utils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1657.027714] env[63371]: INFO nova.compute.claims [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1657.031838] env[63371]: DEBUG nova.compute.manager [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1657.032187] env[63371]: DEBUG nova.network.neutron [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1657.139413] env[63371]: DEBUG nova.policy [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c6416719728485f8dd45eea9e39fdc5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58f967d3770541269fb89f48b3df58c9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1657.165962] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774405, 'name': RemoveSnapshot_Task, 'duration_secs': 0.803383} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.165962] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1657.165962] env[63371]: DEBUG nova.compute.manager [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1657.166909] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4814d0f9-982e-4785-9df7-1d1fe81ab415 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.179671] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.179918] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.180142] env[63371]: DEBUG oslo_concurrency.lockutils [None
req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.180324] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1657.180487] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1657.182672] env[63371]: INFO nova.compute.manager [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Terminating instance [ 1657.184498] env[63371]: DEBUG nova.compute.manager [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1657.184694] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1657.185516] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ec6304f-b6f2-451e-991b-8ade29d3e863 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.194303] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1657.194358] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e23f4323-9fae-4eb7-adfa-90cdc4f626a1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.201744] env[63371]: DEBUG oslo_vmware.api [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1657.201744] env[63371]: value = "task-1774409" [ 1657.201744] env[63371]: _type = "Task" [ 1657.201744] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.211320] env[63371]: DEBUG oslo_vmware.api [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774409, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.280801] env[63371]: DEBUG oslo_vmware.api [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Task: {'id': task-1774408, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148565} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.281124] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1657.281313] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1657.281488] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1657.281658] env[63371]: INFO nova.compute.manager [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1657.281926] env[63371]: DEBUG oslo.service.loopingcall [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1657.282133] env[63371]: DEBUG nova.compute.manager [-] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1657.282228] env[63371]: DEBUG nova.network.neutron [-] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1657.284834] env[63371]: DEBUG nova.network.neutron [req-9e6d067c-550d-4366-806c-87bfd5ab0932 req-1c748648-8c72-4352-a725-15c0ac774033 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Updated VIF entry in instance network info cache for port 8fb2ce38-fb30-464a-9fa3-42bd21ffe84c. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1657.285213] env[63371]: DEBUG nova.network.neutron [req-9e6d067c-550d-4366-806c-87bfd5ab0932 req-1c748648-8c72-4352-a725-15c0ac774033 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Updating instance_info_cache with network_info: [{"id": "8fb2ce38-fb30-464a-9fa3-42bd21ffe84c", "address": "fa:16:3e:61:26:f7", "network": {"id": "1dc1e53b-b865-4642-b667-e771524c6438", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-562533890-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "22fd1634f21c45efa8606cf6c339a790", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fb2ce38-fb", "ovs_interfaceid": "8fb2ce38-fb30-464a-9fa3-42bd21ffe84c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1657.291129] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "refresh_cache-ee3ea0ef-cde9-4326-b564-1aa216e00751" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1657.291129] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "refresh_cache-ee3ea0ef-cde9-4326-b564-1aa216e00751" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1657.291129] env[63371]: DEBUG nova.network.neutron [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1657.518769] env[63371]: DEBUG nova.network.neutron [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Successfully created port: b10b945f-6d31-4c0a-8698-336c1a98a865 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1657.532673] env[63371]: DEBUG nova.compute.manager [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1657.537652] env[63371]: INFO nova.compute.resource_tracker [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating resource usage from migration 5c98d699-8a6c-49bb-bd05-0a06abecc138 [ 1657.690642] env[63371]: INFO nova.compute.manager [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Shelve offloading [ 1657.693028] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1657.693198] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d7fc3cc-348b-4b13-91fe-bd017710c00e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.704295] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1657.704295] env[63371]: value = "task-1774410" [ 1657.704295] env[63371]: _type = "Task" [ 1657.704295] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.716168] env[63371]: DEBUG oslo_vmware.api [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774409, 'name': PowerOffVM_Task, 'duration_secs': 0.205016} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.726449] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1657.726938] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1657.728519] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c8b2cfc-7c66-416c-a234-1f83b7d5a7c1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.731453] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1657.731453] env[63371]: DEBUG nova.compute.manager [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1657.732708] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883aad02-af6f-4ded-a3af-69aa2fc25a39 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.742853] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "refresh_cache-40644960-1400-4dc6-9f2b-78afb7492a8d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1657.742853] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "refresh_cache-40644960-1400-4dc6-9f2b-78afb7492a8d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1657.742853] env[63371]: DEBUG nova.network.neutron [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1657.787862] env[63371]: DEBUG oslo_concurrency.lockutils [req-9e6d067c-550d-4366-806c-87bfd5ab0932 req-1c748648-8c72-4352-a725-15c0ac774033 service nova] Releasing lock "refresh_cache-3c6294ae-9a16-4f1e-abd4-1aec224625ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1657.808564] env[63371]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1657.808564] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1657.808564] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Deleting the datastore file [datastore1] 3c6294ae-9a16-4f1e-abd4-1aec224625ac {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1657.808564] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-70ab93b7-90bf-4ad0-aa69-8d8006766dad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.818191] env[63371]: DEBUG oslo_vmware.api [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1657.818191] env[63371]: value = "task-1774412" [ 1657.818191] env[63371]: _type = "Task" [ 1657.818191] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.830032] env[63371]: DEBUG oslo_vmware.api [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774412, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.840350] env[63371]: DEBUG nova.network.neutron [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1658.019995] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe64397-ffc7-4f43-912c-de3f38e740c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.028551] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffe0217-4700-4adf-8963-b4d30e9aff63 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.073803] env[63371]: DEBUG nova.network.neutron [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Updating instance_info_cache with network_info: [{"id": "d78faddd-f19f-47ac-bee0-dd2e71ad5e42", "address": "fa:16:3e:08:1b:95", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd78faddd-f1", "ovs_interfaceid": "d78faddd-f19f-47ac-bee0-dd2e71ad5e42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1658.078777] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403dcf21-559a-47c5-8a7e-a6fc8d007790 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.088030] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e264c74-67f6-498c-ae28-bbe28602febe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.103364] env[63371]: DEBUG nova.compute.provider_tree [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1658.322916] env[63371]: DEBUG nova.network.neutron [-] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1658.333239] env[63371]: DEBUG oslo_vmware.api [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 
tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774412, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144406} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.333406] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1658.333582] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1658.333749] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1658.333914] env[63371]: INFO nova.compute.manager [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1658.334173] env[63371]: DEBUG oslo.service.loopingcall [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1658.334358] env[63371]: DEBUG nova.compute.manager [-] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1658.334447] env[63371]: DEBUG nova.network.neutron [-] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1658.420740] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Acquiring lock "36d5c00a-4762-4801-aff1-0a22e336730a" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.421048] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Lock "36d5c00a-4762-4801-aff1-0a22e336730a" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.421235] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Acquiring lock "36d5c00a-4762-4801-aff1-0a22e336730a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.421419] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Lock "36d5c00a-4762-4801-aff1-0a22e336730a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.421584] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Lock "36d5c00a-4762-4801-aff1-0a22e336730a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.425398] env[63371]: DEBUG nova.compute.manager [req-f933ee4b-2efa-4a6d-b0a7-b4c2c3225129 req-cd3104d3-f9b0-465b-9367-52f30355ffce service nova] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Received event network-vif-deleted-1d08ea03-4a7c-43bc-9a11-db1f92c6c505 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1658.425908] env[63371]: INFO nova.compute.manager [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] 
Terminating instance [ 1658.427645] env[63371]: DEBUG nova.compute.manager [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1658.427838] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1658.432760] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f4c8900-aac6-485e-9715-8574dd4a8600 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.447870] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1658.448174] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bbe21609-50d2-45bd-9d0b-0b61a6a708ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.454349] env[63371]: DEBUG oslo_vmware.api [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Waiting for the task: (returnval){ [ 1658.454349] env[63371]: value = "task-1774413" [ 1658.454349] env[63371]: _type = "Task" [ 1658.454349] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.464300] env[63371]: DEBUG oslo_vmware.api [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774413, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.522025] env[63371]: DEBUG nova.network.neutron [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Updating instance_info_cache with network_info: [{"id": "c42db3e1-640d-4925-b5a3-adb5ddbd8177", "address": "fa:16:3e:65:14:e0", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc42db3e1-64", "ovs_interfaceid": "c42db3e1-640d-4925-b5a3-adb5ddbd8177", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1658.577025] env[63371]: DEBUG nova.compute.manager [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1658.581221] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "refresh_cache-ee3ea0ef-cde9-4326-b564-1aa216e00751" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1658.581640] env[63371]: DEBUG nova.compute.manager [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Instance network_info: |[{"id": "d78faddd-f19f-47ac-bee0-dd2e71ad5e42", "address": "fa:16:3e:08:1b:95", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd78faddd-f1", "ovs_interfaceid": "d78faddd-f19f-47ac-bee0-dd2e71ad5e42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1658.582073] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:1b:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd78faddd-f19f-47ac-bee0-dd2e71ad5e42', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1658.598720] env[63371]: DEBUG oslo.service.loopingcall [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1658.599184] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1658.600639] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d206b1c6-428b-445f-a196-e9f94223397f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.618912] env[63371]: DEBUG nova.scheduler.client.report [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1658.629910] env[63371]: DEBUG nova.virt.hardware [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1658.630843] env[63371]: DEBUG nova.virt.hardware [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1658.630843] env[63371]: DEBUG nova.virt.hardware [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1658.630843] env[63371]: DEBUG nova.virt.hardware [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1658.630843] env[63371]: DEBUG nova.virt.hardware [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1658.630843] env[63371]: DEBUG nova.virt.hardware [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1658.631745] env[63371]: DEBUG nova.virt.hardware [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1658.631745] env[63371]: DEBUG nova.virt.hardware [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1658.631745] env[63371]: DEBUG nova.virt.hardware [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1658.631745] env[63371]: DEBUG nova.virt.hardware [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1658.631745] env[63371]: DEBUG nova.virt.hardware [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1658.634338] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90bd9bc-4e2a-4d82-bd2f-55c45dd839c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.636981] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1658.636981] env[63371]: value = "task-1774414" [ 1658.636981] env[63371]: _type = "Task" [ 1658.636981] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.645175] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-914b763c-a183-4d4e-8e1b-305d097c9e29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.652624] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774414, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.655022] env[63371]: DEBUG nova.compute.manager [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Received event network-vif-plugged-d78faddd-f19f-47ac-bee0-dd2e71ad5e42 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1658.655225] env[63371]: DEBUG oslo_concurrency.lockutils [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] Acquiring lock "ee3ea0ef-cde9-4326-b564-1aa216e00751-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1658.655466] env[63371]: DEBUG oslo_concurrency.lockutils [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] Lock "ee3ea0ef-cde9-4326-b564-1aa216e00751-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1658.655610] env[63371]: DEBUG oslo_concurrency.lockutils [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] Lock "ee3ea0ef-cde9-4326-b564-1aa216e00751-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1658.655800] env[63371]: DEBUG nova.compute.manager [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] No waiting events found dispatching network-vif-plugged-d78faddd-f19f-47ac-bee0-dd2e71ad5e42 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1658.655994] env[63371]: WARNING nova.compute.manager [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Received unexpected event network-vif-plugged-d78faddd-f19f-47ac-bee0-dd2e71ad5e42 for instance with vm_state building and task_state spawning. [ 1658.656215] env[63371]: DEBUG nova.compute.manager [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Received event network-changed-d78faddd-f19f-47ac-bee0-dd2e71ad5e42 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1658.656355] env[63371]: DEBUG nova.compute.manager [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Refreshing instance network info cache due to event network-changed-d78faddd-f19f-47ac-bee0-dd2e71ad5e42. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1658.656488] env[63371]: DEBUG oslo_concurrency.lockutils [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] Acquiring lock "refresh_cache-ee3ea0ef-cde9-4326-b564-1aa216e00751" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1658.656613] env[63371]: DEBUG oslo_concurrency.lockutils [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] Acquired lock "refresh_cache-ee3ea0ef-cde9-4326-b564-1aa216e00751" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1658.656762] env[63371]: DEBUG nova.network.neutron [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Refreshing network info cache for port d78faddd-f19f-47ac-bee0-dd2e71ad5e42 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1658.828993] env[63371]: INFO nova.compute.manager [-] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Took 1.55 seconds to deallocate network for instance. [ 1658.891373] env[63371]: DEBUG nova.network.neutron [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Updated VIF entry in instance network info cache for port d78faddd-f19f-47ac-bee0-dd2e71ad5e42. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1658.891734] env[63371]: DEBUG nova.network.neutron [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Updating instance_info_cache with network_info: [{"id": "d78faddd-f19f-47ac-bee0-dd2e71ad5e42", "address": "fa:16:3e:08:1b:95", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd78faddd-f1", "ovs_interfaceid": "d78faddd-f19f-47ac-bee0-dd2e71ad5e42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1658.965634] env[63371]: DEBUG oslo_vmware.api [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774413, 'name': PowerOffVM_Task, 'duration_secs': 0.221692} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.966314] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1658.966491] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1658.966755] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce2593b4-62f5-42d1-b2ce-a6b8bfe30bd6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.024716] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "refresh_cache-40644960-1400-4dc6-9f2b-78afb7492a8d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.041955] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1659.042228] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1659.042409] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Deleting the datastore file [datastore1] 36d5c00a-4762-4801-aff1-0a22e336730a {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1659.042680] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6855465e-ad71-4cf4-8efc-fd909e8bb5f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.050642] env[63371]: DEBUG oslo_vmware.api [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Waiting for the task: (returnval){ [ 1659.050642] env[63371]: value = "task-1774416" [ 1659.050642] env[63371]: _type = "Task" [ 1659.050642] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.060704] env[63371]: DEBUG oslo_vmware.api [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774416, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.126334] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.614s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.126557] env[63371]: INFO nova.compute.manager [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Migrating [ 1659.133492] env[63371]: DEBUG oslo_concurrency.lockutils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.746s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.135057] env[63371]: INFO nova.compute.claims [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1659.157069] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774414, 'name': CreateVM_Task, 'duration_secs': 0.48722} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.157242] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1659.157902] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.158111] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.158489] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1659.158746] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89b0c15f-2bfc-4a52-aa93-fbd9e83aeb0f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.164564] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1659.164564] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f7678d-a631-2f45-7118-4e6d855622ad" [ 1659.164564] env[63371]: _type = "Task" [ 1659.164564] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.169388] env[63371]: DEBUG nova.network.neutron [-] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1659.176475] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f7678d-a631-2f45-7118-4e6d855622ad, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.335291] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.381265] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1659.382251] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f94ac5d3-e154-448e-8316-1e043ef4475b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.390817] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1659.390817] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8974e50b-024a-428d-93d2-413e90d34eb0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.393631] env[63371]: DEBUG oslo_concurrency.lockutils [req-e215b9da-788e-4e2f-8f39-042578d5f297 req-7b374f5c-9f75-494e-bd36-83c9d52213b4 service nova] Releasing lock "refresh_cache-ee3ea0ef-cde9-4326-b564-1aa216e00751" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.526641] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1659.526641] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1659.526641] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleting the datastore file [datastore1] 40644960-1400-4dc6-9f2b-78afb7492a8d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1659.526641] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bcfc101-5dc7-4322-9c18-27d9b8699db2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.537814] env[63371]: DEBUG oslo_vmware.api [None 
req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1659.537814] env[63371]: value = "task-1774418" [ 1659.537814] env[63371]: _type = "Task" [ 1659.537814] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.555689] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774418, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.567293] env[63371]: DEBUG oslo_vmware.api [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Task: {'id': task-1774416, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.280462} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.567293] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1659.567503] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1659.572795] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1659.572795] env[63371]: INFO nova.compute.manager [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1659.572795] env[63371]: DEBUG oslo.service.loopingcall [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1659.572795] env[63371]: DEBUG nova.compute.manager [-] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1659.572795] env[63371]: DEBUG nova.network.neutron [-] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1659.649379] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.652050] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.652050] env[63371]: DEBUG nova.network.neutron [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1659.672476] env[63371]: INFO nova.compute.manager [-] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Took 1.34 seconds to deallocate network for instance. 
[ 1659.683076] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.683871] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Processing image 1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1659.683871] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1659.683871] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1659.683871] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1659.684733] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6ff944a-8d9a-4520-9ff1-dc9145729cbc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.695464] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1659.695464] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1659.695464] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08b041e7-beca-47a1-82ea-1cb21c9da27b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.703016] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1659.703016] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522b5448-0045-d5cf-046e-b206da36b10a" [ 1659.703016] env[63371]: _type = "Task" [ 1659.703016] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.713983] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Preparing fetch location {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1659.714358] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Fetch image to [datastore1] OSTACK_IMG_a7df9682-ef0b-4fa5-9279-c8e119fa6490/OSTACK_IMG_a7df9682-ef0b-4fa5-9279-c8e119fa6490.vmdk {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1659.714661] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Downloading stream optimized image 1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc to [datastore1] OSTACK_IMG_a7df9682-ef0b-4fa5-9279-c8e119fa6490/OSTACK_IMG_a7df9682-ef0b-4fa5-9279-c8e119fa6490.vmdk on the data store datastore1 as vApp {{(pid=63371) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1659.714863] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Downloading image file data 1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc to the ESX as VM named 'OSTACK_IMG_a7df9682-ef0b-4fa5-9279-c8e119fa6490' {{(pid=63371) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1659.740880] env[63371]: DEBUG nova.network.neutron [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Successfully updated port: b10b945f-6d31-4c0a-8698-336c1a98a865 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1659.814335] env[63371]: DEBUG oslo_vmware.rw_handles [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1659.814335] env[63371]: value = "resgroup-9" [ 1659.814335] env[63371]: _type = "ResourcePool" [ 1659.814335] env[63371]: }. 
{{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1659.814643] env[63371]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-5b35c085-dc09-4dac-92ef-8e26f76fad48 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.836287] env[63371]: DEBUG oslo_vmware.rw_handles [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lease: (returnval){ [ 1659.836287] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52692a37-0481-4e0e-a5d2-a860a4b2830f" [ 1659.836287] env[63371]: _type = "HttpNfcLease" [ 1659.836287] env[63371]: } obtained for vApp import into resource pool (val){ [ 1659.836287] env[63371]: value = "resgroup-9" [ 1659.836287] env[63371]: _type = "ResourcePool" [ 1659.836287] env[63371]: }. {{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1659.836535] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the lease: (returnval){ [ 1659.836535] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52692a37-0481-4e0e-a5d2-a860a4b2830f" [ 1659.836535] env[63371]: _type = "HttpNfcLease" [ 1659.836535] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1659.843561] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1659.843561] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52692a37-0481-4e0e-a5d2-a860a4b2830f" [ 1659.843561] env[63371]: _type = "HttpNfcLease" [ 1659.843561] env[63371]: } is initializing. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1660.046802] env[63371]: DEBUG oslo_vmware.api [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774418, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146489} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.046802] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1660.046802] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1660.046802] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1660.067388] env[63371]: INFO nova.scheduler.client.report [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted allocations for instance 40644960-1400-4dc6-9f2b-78afb7492a8d [ 1660.184372] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.248676] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "refresh_cache-61a44b0c-86fc-4f1c-a102-61eaff509d20" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.248676] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "refresh_cache-61a44b0c-86fc-4f1c-a102-61eaff509d20" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.248676] env[63371]: DEBUG nova.network.neutron [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1660.347213] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1660.347213] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52692a37-0481-4e0e-a5d2-a860a4b2830f" [ 1660.347213] env[63371]: _type = "HttpNfcLease" [ 1660.347213] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1660.354714] env[63371]: DEBUG nova.network.neutron [-] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1660.469845] env[63371]: DEBUG nova.compute.manager [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Received event network-vif-deleted-8fb2ce38-fb30-464a-9fa3-42bd21ffe84c {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1660.470120] env[63371]: DEBUG nova.compute.manager [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Received event network-vif-unplugged-c42db3e1-640d-4925-b5a3-adb5ddbd8177 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1660.470377] env[63371]: DEBUG oslo_concurrency.lockutils [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] Acquiring lock "40644960-1400-4dc6-9f2b-78afb7492a8d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.470615] env[63371]: DEBUG oslo_concurrency.lockutils [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] Lock "40644960-1400-4dc6-9f2b-78afb7492a8d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1660.470754] env[63371]: DEBUG oslo_concurrency.lockutils [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] Lock "40644960-1400-4dc6-9f2b-78afb7492a8d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1660.471712] env[63371]: DEBUG nova.compute.manager [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] No waiting events found dispatching network-vif-unplugged-c42db3e1-640d-4925-b5a3-adb5ddbd8177 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1660.472024] env[63371]: WARNING nova.compute.manager [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Received unexpected event network-vif-unplugged-c42db3e1-640d-4925-b5a3-adb5ddbd8177 for instance with vm_state shelved_offloaded and task_state None. 
[ 1660.472213] env[63371]: DEBUG nova.compute.manager [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Received event network-changed-c42db3e1-640d-4925-b5a3-adb5ddbd8177 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1660.472462] env[63371]: DEBUG nova.compute.manager [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Refreshing instance network info cache due to event network-changed-c42db3e1-640d-4925-b5a3-adb5ddbd8177. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1660.472729] env[63371]: DEBUG oslo_concurrency.lockutils [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] Acquiring lock "refresh_cache-40644960-1400-4dc6-9f2b-78afb7492a8d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.472927] env[63371]: DEBUG oslo_concurrency.lockutils [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] Acquired lock "refresh_cache-40644960-1400-4dc6-9f2b-78afb7492a8d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.474878] env[63371]: DEBUG nova.network.neutron [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Refreshing network info cache for port c42db3e1-640d-4925-b5a3-adb5ddbd8177 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1660.476161] env[63371]: DEBUG nova.network.neutron [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance_info_cache with network_info: [{"id": "826bbbf2-7d7e-47d0-9516-4cb91c3d94a7", "address": "fa:16:3e:67:7c:99", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap826bbbf2-7d", "ovs_interfaceid": "826bbbf2-7d7e-47d0-9516-4cb91c3d94a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1660.544097] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a50a9c72-62dc-4977-a477-a311232c63ec {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.553159] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14364fb2-3a76-42b0-9945-9b218506fdc8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.586052] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.586864] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f351f83a-ce6e-4488-baf0-7afa51351b26 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.594424] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0677ba44-0402-48e1-8078-f6ec24d7f6b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.611245] env[63371]: DEBUG nova.compute.provider_tree [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1660.697965] env[63371]: DEBUG nova.compute.manager [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Received event network-vif-plugged-b10b945f-6d31-4c0a-8698-336c1a98a865 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1660.698209] env[63371]: DEBUG oslo_concurrency.lockutils [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] Acquiring lock "61a44b0c-86fc-4f1c-a102-61eaff509d20-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.698433] env[63371]: DEBUG oslo_concurrency.lockutils [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] Lock "61a44b0c-86fc-4f1c-a102-61eaff509d20-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1660.698602] env[63371]: DEBUG oslo_concurrency.lockutils [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] Lock "61a44b0c-86fc-4f1c-a102-61eaff509d20-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1660.698777] env[63371]: DEBUG nova.compute.manager [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] No waiting events found dispatching network-vif-plugged-b10b945f-6d31-4c0a-8698-336c1a98a865 
{{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1660.698948] env[63371]: WARNING nova.compute.manager [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Received unexpected event network-vif-plugged-b10b945f-6d31-4c0a-8698-336c1a98a865 for instance with vm_state building and task_state spawning. [ 1660.699134] env[63371]: DEBUG nova.compute.manager [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Received event network-changed-b10b945f-6d31-4c0a-8698-336c1a98a865 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1660.699311] env[63371]: DEBUG nova.compute.manager [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Refreshing instance network info cache due to event network-changed-b10b945f-6d31-4c0a-8698-336c1a98a865. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1660.699945] env[63371]: DEBUG oslo_concurrency.lockutils [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] Acquiring lock "refresh_cache-61a44b0c-86fc-4f1c-a102-61eaff509d20" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.834366] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "b523486c-adae-4322-80be-1f3bf33ca192" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.834685] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1660.846323] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1660.846323] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52692a37-0481-4e0e-a5d2-a860a4b2830f" [ 1660.846323] env[63371]: _type = "HttpNfcLease" [ 1660.846323] env[63371]: } is initializing. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1660.858924] env[63371]: INFO nova.compute.manager [-] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Took 1.29 seconds to deallocate network for instance. 
[ 1660.984569] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1660.998627] env[63371]: DEBUG nova.network.neutron [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1661.117503] env[63371]: DEBUG nova.scheduler.client.report [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1661.285290] env[63371]: DEBUG nova.network.neutron [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Updating instance_info_cache with network_info: [{"id": "b10b945f-6d31-4c0a-8698-336c1a98a865", "address": "fa:16:3e:42:c0:1f", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb10b945f-6d", "ovs_interfaceid": "b10b945f-6d31-4c0a-8698-336c1a98a865", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1661.338689] env[63371]: DEBUG nova.compute.utils [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1661.341550] env[63371]: DEBUG nova.network.neutron [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Updated VIF 
entry in instance network info cache for port c42db3e1-640d-4925-b5a3-adb5ddbd8177. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1661.341985] env[63371]: DEBUG nova.network.neutron [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Updating instance_info_cache with network_info: [{"id": "c42db3e1-640d-4925-b5a3-adb5ddbd8177", "address": "fa:16:3e:65:14:e0", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": null, "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapc42db3e1-64", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1661.351289] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1661.351289] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52692a37-0481-4e0e-a5d2-a860a4b2830f" [ 1661.351289] env[63371]: _type = "HttpNfcLease" [ 1661.351289] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1661.352762] env[63371]: DEBUG oslo_vmware.rw_handles [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1661.352762] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52692a37-0481-4e0e-a5d2-a860a4b2830f" [ 1661.352762] env[63371]: _type = "HttpNfcLease" [ 1661.352762] env[63371]: }. {{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1661.352762] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba845014-6359-4aa5-8bb5-5a93ff5fd08f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.362081] env[63371]: DEBUG oslo_vmware.rw_handles [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5261cd58-03f0-35c0-9802-2485241eeb55/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1661.362296] env[63371]: DEBUG oslo_vmware.rw_handles [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5261cd58-03f0-35c0-9802-2485241eeb55/disk-0.vmdk. 
{{(pid=63371) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1661.419450] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.420344] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5615c6aa-1ecf-4969-a59e-98b019fb79dd tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "40644960-1400-4dc6-9f2b-78afb7492a8d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.426316] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-63d3b2bd-ce11-4542-891d-2ccd67e56b90 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.624506] env[63371]: DEBUG oslo_concurrency.lockutils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.490s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.624506] env[63371]: DEBUG nova.compute.manager [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1661.626963] env[63371]: DEBUG oslo_concurrency.lockutils [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.668s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.627219] env[63371]: DEBUG nova.objects.instance [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lazy-loading 'resources' on Instance uuid e4608e3c-7083-42fa-b88c-8ee007ef7f60 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1661.794881] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "refresh_cache-61a44b0c-86fc-4f1c-a102-61eaff509d20" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.799023] env[63371]: DEBUG nova.compute.manager [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Instance network_info: |[{"id": "b10b945f-6d31-4c0a-8698-336c1a98a865", "address": "fa:16:3e:42:c0:1f", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb10b945f-6d", "ovs_interfaceid": "b10b945f-6d31-4c0a-8698-336c1a98a865", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1661.799023] env[63371]: DEBUG oslo_concurrency.lockutils [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] Acquired lock "refresh_cache-61a44b0c-86fc-4f1c-a102-61eaff509d20" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.799023] env[63371]: DEBUG nova.network.neutron [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Refreshing network info cache for port b10b945f-6d31-4c0a-8698-336c1a98a865 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1661.799023] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31ed10db-7bde-4611-a9f1-575dbe07713a 
tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:c0:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b10b945f-6d31-4c0a-8698-336c1a98a865', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1661.806394] env[63371]: DEBUG oslo.service.loopingcall [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1661.807586] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1661.807884] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b561f56a-3e56-47a6-83f5-6b43d0721363 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.830271] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1661.830271] env[63371]: value = "task-1774420" [ 1661.830271] env[63371]: _type = "Task" [ 1661.830271] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.840422] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774420, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.844248] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.848264] env[63371]: DEBUG oslo_concurrency.lockutils [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] Releasing lock "refresh_cache-40644960-1400-4dc6-9f2b-78afb7492a8d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.848522] env[63371]: DEBUG nova.compute.manager [req-87ffe47d-072b-4187-be80-469656772397 req-73dc76bd-0f36-455a-a955-79ca6c3a55f7 service nova] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Received event network-vif-deleted-e6dc9116-b799-4666-8abb-a5e2ab4749e5 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1662.131019] env[63371]: DEBUG nova.compute.utils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1662.139077] env[63371]: DEBUG nova.compute.manager [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Not allocating networking since 'none' was specified. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1662.345616] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774420, 'name': CreateVM_Task, 'duration_secs': 0.406131} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.347696] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1662.348449] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1662.348611] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1662.348934] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1662.349438] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69935864-9cb0-47d2-beec-e659f3952889 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.355320] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1662.355320] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a2d4f9-d3df-8256-e92a-3fab67c01562" [ 1662.355320] env[63371]: _type = "Task" [ 1662.355320] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.369366] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a2d4f9-d3df-8256-e92a-3fab67c01562, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.400342] env[63371]: DEBUG oslo_vmware.rw_handles [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Completed reading data from the image iterator. {{(pid=63371) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1662.400477] env[63371]: DEBUG oslo_vmware.rw_handles [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5261cd58-03f0-35c0-9802-2485241eeb55/disk-0.vmdk. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1662.401819] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e6c190-381d-43cd-a695-e9078c0783a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.407990] env[63371]: DEBUG oslo_vmware.rw_handles [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5261cd58-03f0-35c0-9802-2485241eeb55/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1662.408190] env[63371]: DEBUG oslo_vmware.rw_handles [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5261cd58-03f0-35c0-9802-2485241eeb55/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1662.408397] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-f38584df-6015-4382-a3ff-9fdb2481acee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.507341] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86b673e2-c432-4199-a60f-095cba3be7f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.532671] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance 'e1bc4623-f6b5-4440-a58d-594e9cbe3628' progress to 0 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1662.567129] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91bc755f-6fa2-47f4-a5e8-97c149f5b89f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.575612] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d4ec9d-e5ae-45bf-997c-f47808757804 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.317181] env[63371]: DEBUG nova.compute.manager [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1663.321199] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "b523486c-adae-4322-80be-1f3bf33ca192" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.321462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.321688] env[63371]: INFO nova.compute.manager [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Attaching volume 16dfef5a-9dd9-48d8-b733-f65d801d0391 to /dev/sdb [ 1663.324422] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1663.333576] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2f0474cd-19dc-484f-8979-929bceb65530 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.335373] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de17b0c8-eb49-48db-a3ea-913d29d34bf6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.337971] env[63371]: DEBUG oslo_vmware.rw_handles [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5261cd58-03f0-35c0-9802-2485241eeb55/disk-0.vmdk. 
{{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1663.338230] env[63371]: INFO nova.virt.vmwareapi.images [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Downloaded image file data 1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc [ 1663.340831] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Acquiring lock "c04edf6d-8a07-4776-be0f-b763fb3059d2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.341059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Lock "c04edf6d-8a07-4776-be0f-b763fb3059d2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.342611] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3c8729-b2dd-4c30-a803-b71583fe2437 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.353479] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a2d4f9-d3df-8256-e92a-3fab67c01562, 'name': SearchDatastore_Task, 'duration_secs': 0.017725} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.366251] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.366700] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1663.366837] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1663.366910] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.367071] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1663.368738] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1663.368738] env[63371]: value = "task-1774421" [ 1663.368738] env[63371]: _type = "Task" [ 1663.368738] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.369038] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-58e107c5-232b-4065-a736-7d26e3c02224 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.374328] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5bd7145-36b6-4bc4-aac1-bf0c92bcafd8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.376084] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8d608dc-f2e3-4065-9cd1-8ccfcb866066 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.383196] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07dc3da-6ab2-424a-aeb5-b1d17847c298 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.394780] env[63371]: DEBUG nova.compute.provider_tree [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1663.401229] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1663.401229] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1663.402112] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774421, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.402966] env[63371]: DEBUG nova.network.neutron [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Updated VIF entry in instance network info cache for port b10b945f-6d31-4c0a-8698-336c1a98a865. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1663.403319] env[63371]: DEBUG nova.network.neutron [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Updating instance_info_cache with network_info: [{"id": "b10b945f-6d31-4c0a-8698-336c1a98a865", "address": "fa:16:3e:42:c0:1f", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb10b945f-6d", "ovs_interfaceid": "b10b945f-6d31-4c0a-8698-336c1a98a865", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.409709] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1657f8bc-5ebf-46db-ace4-f94683239582 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.416665] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1663.416665] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ec42ef-3fae-48fe-2816-789c7b2b6241" [ 1663.416665] env[63371]: _type = "Task" [ 1663.416665] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.418768] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301389ef-415a-4ab4-abb9-989e27d80425 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.421809] env[63371]: INFO nova.virt.vmwareapi.images [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] The imported VM was unregistered [ 1663.424873] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Caching image {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1663.425237] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating directory with path [datastore1] devstack-image-cache_base/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1663.430613] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e734d26d-bf43-4bed-a040-cb28568ed97a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.444489] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ec42ef-3fae-48fe-2816-789c7b2b6241, 'name': SearchDatastore_Task, 'duration_secs': 0.010046} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.449158] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Created directory with path [datastore1] devstack-image-cache_base/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1663.449475] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_a7df9682-ef0b-4fa5-9279-c8e119fa6490/OSTACK_IMG_a7df9682-ef0b-4fa5-9279-c8e119fa6490.vmdk to [datastore1] devstack-image-cache_base/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc.vmdk. 
{{(pid=63371) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1663.449976] env[63371]: DEBUG nova.virt.block_device [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updating existing volume attachment record: 285b59bf-e8d7-47e6-93c5-44c708640cdb {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1663.454238] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dde7f12e-fef5-4caa-b119-19d83b95b8cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.456294] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-eadb55b5-8b87-4e44-a1a1-21144a39405f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.461656] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1663.461656] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5299ceeb-e8ce-5a12-ebad-212c46cfe106" [ 1663.461656] env[63371]: _type = "Task" [ 1663.461656] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.465646] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1663.465646] env[63371]: value = "task-1774423" [ 1663.465646] env[63371]: _type = "Task" [ 1663.465646] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.472046] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5299ceeb-e8ce-5a12-ebad-212c46cfe106, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.476047] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774423, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.848812] env[63371]: DEBUG nova.compute.manager [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1663.886537] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774421, 'name': PowerOffVM_Task, 'duration_secs': 0.252702} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.886833] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1663.887029] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance 'e1bc4623-f6b5-4440-a58d-594e9cbe3628' progress to 17 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1663.913073] env[63371]: DEBUG oslo_concurrency.lockutils [req-4876b2b1-a50b-4e48-b4d9-9204fec86663 req-1d83ec31-0fdf-4e92-88fe-e6212ce8176e service nova] Releasing lock "refresh_cache-61a44b0c-86fc-4f1c-a102-61eaff509d20" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.928483] env[63371]: ERROR nova.scheduler.client.report [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] [req-5e1d327d-1eaa-4f1c-a000-b4b29818b864] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5e1d327d-1eaa-4f1c-a000-b4b29818b864"}]} [ 1663.948937] env[63371]: DEBUG nova.scheduler.client.report [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1663.973974] env[63371]: DEBUG nova.scheduler.client.report [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1663.974222] env[63371]: DEBUG nova.compute.provider_tree [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1663.981563] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5299ceeb-e8ce-5a12-ebad-212c46cfe106, 'name': SearchDatastore_Task, 'duration_secs': 0.010633} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.981888] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.982158] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 61a44b0c-86fc-4f1c-a102-61eaff509d20/61a44b0c-86fc-4f1c-a102-61eaff509d20.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1663.982449] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-61fab896-9238-441e-8993-9920254e29e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.988874] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774423, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.990894] env[63371]: DEBUG nova.scheduler.client.report [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1663.996016] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1663.996016] env[63371]: value = "task-1774427" [ 1663.996016] env[63371]: _type = "Task" [ 1663.996016] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.004311] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774427, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.013267] env[63371]: DEBUG nova.scheduler.client.report [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1664.333558] env[63371]: DEBUG nova.compute.manager [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1664.350754] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ba8fdd-64d0-4836-ab71-5429aa78fa83 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.365742] env[63371]: DEBUG nova.virt.hardware [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1664.366023] env[63371]: DEBUG nova.virt.hardware [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1664.366201] env[63371]: DEBUG nova.virt.hardware [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1664.366384] env[63371]: DEBUG nova.virt.hardware [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1664.366527] env[63371]: DEBUG nova.virt.hardware [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1664.366669] 
env[63371]: DEBUG nova.virt.hardware [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1664.366895] env[63371]: DEBUG nova.virt.hardware [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1664.367067] env[63371]: DEBUG nova.virt.hardware [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1664.367236] env[63371]: DEBUG nova.virt.hardware [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1664.367395] env[63371]: DEBUG nova.virt.hardware [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1664.367560] env[63371]: DEBUG nova.virt.hardware [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1664.368763] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fceb58f-92d5-47e1-800a-6b095c1ef1b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.372916] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25431012-8a4e-4f78-8ddb-53da888d52b8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.376269] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1664.406674] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T21:16:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1664.406894] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1664.407065] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1664.407252] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1664.407429] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1664.407535] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1664.407733] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1664.407888] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1664.408067] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1664.408230] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Possible 
topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1664.408398] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1664.416397] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-26abaccd-e54b-486a-9594-c4cb6e0aa74d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.427282] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e246bd-23a8-4f53-b8be-9c3f86332cba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.431454] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc421821-de63-4220-95e0-db4a1bc17ee4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.441344] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e732e84c-8a25-43bb-b1f4-d2aed2badf98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.453757] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1664.453757] env[63371]: value = "task-1774428" [ 1664.453757] env[63371]: _type = "Task" [ 1664.453757] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.454196] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1664.459670] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Creating folder: Project (451c0ae090e949d9961d2e1401f95302). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1664.460883] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59132374-580a-4b8f-91a9-46527722739e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.473806] env[63371]: DEBUG nova.compute.provider_tree [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1664.484756] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774428, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.488438] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774423, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.489968] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Created folder: Project (451c0ae090e949d9961d2e1401f95302) in parent group-v368199. [ 1664.490166] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Creating folder: Instances. Parent ref: group-v368420. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1664.490444] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b7fc4472-a899-477b-8005-2a3070bdfb89 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.501473] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Created folder: Instances in parent group-v368420. [ 1664.501711] env[63371]: DEBUG oslo.service.loopingcall [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1664.502270] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1664.502487] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4bd85a39-29fe-4f07-916b-0004b3022fa6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.519489] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774427, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.528680] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1664.528680] env[63371]: value = "task-1774431" [ 1664.528680] env[63371]: _type = "Task" [ 1664.528680] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.540428] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774431, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.972019] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774428, 'name': ReconfigVM_Task, 'duration_secs': 0.237647} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.972277] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance 'e1bc4623-f6b5-4440-a58d-594e9cbe3628' progress to 33 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1664.989635] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774423, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.008059] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774427, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.009480] env[63371]: DEBUG nova.scheduler.client.report [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 110 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1665.009480] env[63371]: DEBUG nova.compute.provider_tree [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 110 to 111 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1665.009480] env[63371]: DEBUG nova.compute.provider_tree [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1665.040861] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774431, 'name': CreateVM_Task, 'duration_secs': 0.40254} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.041049] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1665.041507] env[63371]: DEBUG oslo_concurrency.lockutils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.041667] env[63371]: DEBUG oslo_concurrency.lockutils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.042052] env[63371]: DEBUG oslo_concurrency.lockutils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1665.042636] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a832085-32af-4f3b-9a56-035057b2fb72 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.050220] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1665.050220] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d2499e-a527-56c2-eb8a-ee18d70f0556" [ 1665.050220] env[63371]: _type = "Task" [ 1665.050220] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.061831] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d2499e-a527-56c2-eb8a-ee18d70f0556, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.482613] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1665.482956] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1665.483116] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1665.483317] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1665.483460] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1665.483604] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1665.483805] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1665.483958] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1665.484384] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 
tempest-ServerDiskConfigTestJSON-625964520-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1665.484596] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1665.484773] env[63371]: DEBUG nova.virt.hardware [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1665.490436] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Reconfiguring VM instance instance-0000004c to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1665.490753] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a30d658f-7a4c-4578-bd60-ecf4fd7ccf09 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.512046] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774423, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.515647] env[63371]: DEBUG oslo_concurrency.lockutils [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.889s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1665.517654] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774427, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.519199] env[63371]: DEBUG oslo_concurrency.lockutils [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.559s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1665.519433] env[63371]: DEBUG nova.objects.instance [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lazy-loading 'resources' on Instance uuid 3065fc71-f127-43b7-83b7-70140f29965b {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1665.520644] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1665.520644] env[63371]: value = "task-1774432" [ 1665.520644] env[63371]: _type = "Task" [ 1665.520644] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.533751] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774432, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.543473] env[63371]: INFO nova.scheduler.client.report [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Deleted allocations for instance e4608e3c-7083-42fa-b88c-8ee007ef7f60 [ 1665.562364] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d2499e-a527-56c2-eb8a-ee18d70f0556, 'name': SearchDatastore_Task, 'duration_secs': 0.082123} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.562781] env[63371]: DEBUG oslo_concurrency.lockutils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.563428] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1665.563428] env[63371]: DEBUG oslo_concurrency.lockutils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1665.563428] env[63371]: DEBUG oslo_concurrency.lockutils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1665.563626] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1665.563796] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5094558d-4ff1-4ef7-81a5-85b02db76481 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.583675] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1665.583867] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1665.584850] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1988578b-ff3a-4e85-8d13-7c4b7d63cd44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.592568] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1665.592568] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52df68a0-fb90-2921-df33-15da9da477d8" [ 1665.592568] env[63371]: _type = "Task" [ 1665.592568] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.604533] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df68a0-fb90-2921-df33-15da9da477d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.994298] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774423, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.016737] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774427, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.033993] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774432, 'name': ReconfigVM_Task, 'duration_secs': 0.247585} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.034301] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Reconfigured VM instance instance-0000004c to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1666.035168] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34747409-f013-4ddb-af90-7f0a1727a633 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.057779] env[63371]: DEBUG oslo_concurrency.lockutils [None req-65a06e5f-03ac-43e4-beeb-06f5edc04725 tempest-ServersAdminTestJSON-1763458454 tempest-ServersAdminTestJSON-1763458454-project-member] Lock "e4608e3c-7083-42fa-b88c-8ee007ef7f60" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.804s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1666.067638] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] e1bc4623-f6b5-4440-a58d-594e9cbe3628/e1bc4623-f6b5-4440-a58d-594e9cbe3628.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1666.071047] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2296bb1-8196-46ce-ace6-f932fdc43006 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.095390] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1666.095390] env[63371]: value = "task-1774433" [ 1666.095390] env[63371]: _type = "Task" [ 1666.095390] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.109030] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52df68a0-fb90-2921-df33-15da9da477d8, 'name': SearchDatastore_Task, 'duration_secs': 0.090516} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.112672] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774433, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.112901] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79ae3672-3c30-4942-b146-7de6b63622c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.120889] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1666.120889] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52629e7b-ffeb-e0b7-9642-ace67572c92b" [ 1666.120889] env[63371]: _type = "Task" [ 1666.120889] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.130568] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52629e7b-ffeb-e0b7-9642-ace67572c92b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.432049] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a21b8509-856e-4e00-a7af-ae6a74abe231 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.438681] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3916e71-aaae-4ad9-80b1-e3daa952eb6d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.474055] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91dd445-b74d-4123-b88d-e7edad2ff0e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.486984] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0bb998-1755-4219-8cc4-b2006ef02672 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.495160] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774423, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.616936} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.495809] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_a7df9682-ef0b-4fa5-9279-c8e119fa6490/OSTACK_IMG_a7df9682-ef0b-4fa5-9279-c8e119fa6490.vmdk to [datastore1] devstack-image-cache_base/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc.vmdk. 
[ 1666.495992] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Cleaning up location [datastore1] OSTACK_IMG_a7df9682-ef0b-4fa5-9279-c8e119fa6490 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1666.496197] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_a7df9682-ef0b-4fa5-9279-c8e119fa6490 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1666.496612] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fdd37b26-52e2-4a5d-bb9b-8348d078b929 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.508308] env[63371]: DEBUG nova.compute.provider_tree [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1666.515584] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1666.515584] env[63371]: value = "task-1774434" [ 1666.515584] env[63371]: _type = "Task" [ 1666.515584] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.523911] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774427, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.098486} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.524581] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 61a44b0c-86fc-4f1c-a102-61eaff509d20/61a44b0c-86fc-4f1c-a102-61eaff509d20.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1666.524809] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1666.525163] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b5e32969-8d4a-44d0-b381-0891bdaff495 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.530083] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774434, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.535408] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1666.535408] env[63371]: value = "task-1774435" [ 1666.535408] env[63371]: _type = "Task" [ 1666.535408] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.544319] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774435, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.608127] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774433, 'name': ReconfigVM_Task, 'duration_secs': 0.471421} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.608427] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Reconfigured VM instance instance-0000004c to attach disk [datastore1] e1bc4623-f6b5-4440-a58d-594e9cbe3628/e1bc4623-f6b5-4440-a58d-594e9cbe3628.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1666.608711] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance 'e1bc4623-f6b5-4440-a58d-594e9cbe3628' progress to 50 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1666.631369] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52629e7b-ffeb-e0b7-9642-ace67572c92b, 'name': SearchDatastore_Task, 'duration_secs': 0.010962} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.631549] env[63371]: DEBUG oslo_concurrency.lockutils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1666.631802] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 33952466-3df7-4485-8e7a-ab3d6ec3f22c/33952466-3df7-4485-8e7a-ab3d6ec3f22c.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1666.632072] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50741e9d-7581-4486-8701-71961abcbffb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.639822] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1666.639822] env[63371]: value = "task-1774436" [ 1666.639822] env[63371]: _type = "Task" [ 1666.639822] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.650018] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774436, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.016833] env[63371]: DEBUG nova.scheduler.client.report [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1667.032167] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774434, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.04082} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.032167] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1667.033532] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1667.033532] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc.vmdk to [datastore1] ee3ea0ef-cde9-4326-b564-1aa216e00751/ee3ea0ef-cde9-4326-b564-1aa216e00751.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1667.034177] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c93bd0ee-5068-40b3-a155-c01b71da1914 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.046903] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774435, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080142} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.048326] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1667.049427] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1667.049427] env[63371]: value = "task-1774437" [ 1667.049427] env[63371]: _type = "Task" [ 1667.049427] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.049427] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44639e02-6827-4c65-b0b8-a3c212869762 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.077860] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 61a44b0c-86fc-4f1c-a102-61eaff509d20/61a44b0c-86fc-4f1c-a102-61eaff509d20.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1667.081790] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0d77c12-e27e-48af-a4ed-0dca3b5859c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.103347] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774437, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.109009] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1667.109009] env[63371]: value = "task-1774438" [ 1667.109009] env[63371]: _type = "Task" [ 1667.109009] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.118787] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c1d152-0af2-4123-ac38-030408f9ad27 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.121192] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774438, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.140570] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfeff1b-d0f8-4041-b4ec-dab95c721ecd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.167047] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774436, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.453016} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.170800] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance 'e1bc4623-f6b5-4440-a58d-594e9cbe3628' progress to 67 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1667.171135] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 33952466-3df7-4485-8e7a-ab3d6ec3f22c/33952466-3df7-4485-8e7a-ab3d6ec3f22c.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1667.171135] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1667.175018] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-859e9eac-e540-4492-a6ed-d60b78c48ecb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.179208] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1667.179208] env[63371]: value = "task-1774439" [ 1667.179208] env[63371]: _type = "Task" [ 1667.179208] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.187739] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774439, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.527444] env[63371]: DEBUG oslo_concurrency.lockutils [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.008s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1667.531222] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.196s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.531333] env[63371]: DEBUG nova.objects.instance [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lazy-loading 'resources' on Instance uuid e8bd5802-d2ff-4348-92d4-c23277f4eaeb {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1667.560797] env[63371]: INFO nova.scheduler.client.report [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Deleted allocations for instance 3065fc71-f127-43b7-83b7-70140f29965b [ 1667.575369] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774437, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.622407] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774438, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.691626] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067535} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.691926] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1667.696135] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3e6893-754b-4618-8cfa-3b3010b24491 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.722266] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 33952466-3df7-4485-8e7a-ab3d6ec3f22c/33952466-3df7-4485-8e7a-ab3d6ec3f22c.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1667.723642] env[63371]: DEBUG nova.network.neutron [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Port 826bbbf2-7d7e-47d0-9516-4cb91c3d94a7 binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1667.725476] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6da48ce0-a959-42a7-bfed-e78ffe329364 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.763140] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1667.763140] env[63371]: value = "task-1774440" [ 1667.763140] env[63371]: _type = "Task" [ 1667.763140] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.777951] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774440, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.064129] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774437, 'name': CopyVirtualDisk_Task} progress is 40%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.075131] env[63371]: DEBUG oslo_concurrency.lockutils [None req-00ee303e-d712-4a46-8a1a-3945f6d583a4 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "3065fc71-f127-43b7-83b7-70140f29965b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.700s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.125973] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774438, 'name': ReconfigVM_Task, 'duration_secs': 0.747916} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.127022] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 61a44b0c-86fc-4f1c-a102-61eaff509d20/61a44b0c-86fc-4f1c-a102-61eaff509d20.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1668.127457] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1533200-dd3d-4415-ba8a-a00255ff804d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.140062] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1668.140062] env[63371]: value = "task-1774442" [ 1668.140062] env[63371]: _type = "Task" [ 1668.140062] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.154101] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774442, 'name': Rename_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.277941] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.278438] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1668.278438] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1668.293044] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774440, 'name': ReconfigVM_Task, 'duration_secs': 0.45417} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.293963] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 33952466-3df7-4485-8e7a-ab3d6ec3f22c/33952466-3df7-4485-8e7a-ab3d6ec3f22c.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1668.294644] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9654010-9ed2-47fa-b5d8-94d15690d054 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.310556] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1668.310556] env[63371]: value = "task-1774443" [ 1668.310556] env[63371]: _type = "Task" [ 1668.310556] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.323304] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774443, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.474394] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef645c9b-41a3-4d68-bda8-e570e5a1ca7a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.484548] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592145de-1b2d-4c92-8834-524eca381543 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.519715] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be56c36b-046f-4e4a-89c1-5c66e991f950 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.530236] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f757b0d-8a05-42b8-85de-77719c375cbc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.547881] env[63371]: DEBUG nova.compute.provider_tree [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1668.565907] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774437, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.652852] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774442, 'name': Rename_Task, 'duration_secs': 0.491401} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.655718] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1668.655718] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ad30aee-8cb1-4aa9-bc6f-448d9f40cae6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.664269] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1668.664269] env[63371]: value = "task-1774444" [ 1668.664269] env[63371]: _type = "Task" [ 1668.664269] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.674913] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774444, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.825188] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774443, 'name': Rename_Task, 'duration_secs': 0.166438} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.825540] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1668.826049] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84b0c10c-8971-46f2-9cd7-61358d98e63e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.834978] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1668.834978] env[63371]: value = "task-1774445" [ 1668.834978] env[63371]: _type = "Task" [ 1668.834978] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.850098] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774445, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.900932] env[63371]: DEBUG nova.compute.manager [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1668.902427] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4c81d3-e502-41b1-9f00-04b483e68b63 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.071224] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774437, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.093807] env[63371]: DEBUG nova.scheduler.client.report [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 111 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1669.094348] env[63371]: DEBUG nova.compute.provider_tree [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 111 to 112 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1669.094348] env[63371]: DEBUG nova.compute.provider_tree [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1669.179879] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774444, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.337143] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1669.337378] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1669.337562] env[63371]: DEBUG nova.network.neutron [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1669.362380] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774445, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.421692] env[63371]: INFO nova.compute.manager [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] instance snapshotting [ 1669.422345] env[63371]: DEBUG nova.objects.instance [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'flavor' on Instance uuid 9862b0f0-ccf6-4e69-9e78-cf864adaa65e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1669.567509] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774437, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.416207} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.567778] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc/1ea0b1e7-d0ab-48ef-b7ff-0b3a642579bc.vmdk to [datastore1] ee3ea0ef-cde9-4326-b564-1aa216e00751/ee3ea0ef-cde9-4326-b564-1aa216e00751.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1669.568738] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98061a9-b37d-40c3-83ed-f6775af1929f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.597325] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] ee3ea0ef-cde9-4326-b564-1aa216e00751/ee3ea0ef-cde9-4326-b564-1aa216e00751.vmdk or device None with type streamOptimized {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1669.597547] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a9b4ca5-5d25-4225-8bdf-815c21efeb5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.614498] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.083s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.617030] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.433s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.617520] env[63371]: DEBUG nova.objects.instance [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lazy-loading 'resources' on Instance uuid 3c6294ae-9a16-4f1e-abd4-1aec224625ac {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1669.625033] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1669.625033] env[63371]: value = "task-1774446" [ 1669.625033] env[63371]: _type = "Task" [ 1669.625033] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.636097] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774446, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.643715] env[63371]: INFO nova.scheduler.client.report [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Deleted allocations for instance e8bd5802-d2ff-4348-92d4-c23277f4eaeb [ 1669.675338] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774444, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.676376] env[63371]: DEBUG nova.network.neutron [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance_info_cache with network_info: [{"id": "826bbbf2-7d7e-47d0-9516-4cb91c3d94a7", "address": "fa:16:3e:67:7c:99", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap826bbbf2-7d", "ovs_interfaceid": "826bbbf2-7d7e-47d0-9516-4cb91c3d94a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1669.852069] env[63371]: DEBUG oslo_vmware.api [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774445, 'name': PowerOnVM_Task, 'duration_secs': 0.589805} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1669.852069] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1669.852069] env[63371]: INFO nova.compute.manager [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Took 5.52 seconds to spawn the instance on the hypervisor. [ 1669.852069] env[63371]: DEBUG nova.compute.manager [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1669.852069] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a37f1b-5790-47f2-a8f2-2b6db29d8e8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.929166] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e1f3072-6c90-4e1d-93ec-985a348cc267 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.950503] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99276e80-10dc-4fb4-adbf-c206f4052b73 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.134571] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774446, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.159877] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90076925-7d22-4556-a735-1528c1899fea tempest-ServersV294TestFqdnHostnames-839710686 tempest-ServersV294TestFqdnHostnames-839710686-project-member] Lock "e8bd5802-d2ff-4348-92d4-c23277f4eaeb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.042s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.175220] env[63371]: DEBUG oslo_vmware.api [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774444, 'name': PowerOnVM_Task, 'duration_secs': 1.088791} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1670.175495] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1670.175694] env[63371]: INFO nova.compute.manager [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Took 11.60 seconds to spawn the instance on the hypervisor. [ 1670.175864] env[63371]: DEBUG nova.compute.manager [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1670.177390] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc16eae-ac39-4f03-9a52-e556a2aa0bf6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.183092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1670.375361] env[63371]: INFO nova.compute.manager [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Took 19.00 seconds to build instance. [ 1670.461485] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1670.461841] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8d4f1e08-7540-4d8a-8e99-4029c286be80 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.467811] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e397226e-c3f0-47d1-b077-266e08783185 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.471492] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1670.471492] env[63371]: value = "task-1774447" [ 1670.471492] env[63371]: _type = "Task" [ 1670.471492] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.477863] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7af433-c647-4ce8-bd5e-8b8777ffb730 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.483680] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774447, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.513387] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1e76eb3-e1d3-439a-b730-d04cf2b14781 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.517206] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Volume attach. Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1670.517505] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368419', 'volume_id': '16dfef5a-9dd9-48d8-b733-f65d801d0391', 'name': 'volume-16dfef5a-9dd9-48d8-b733-f65d801d0391', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b523486c-adae-4322-80be-1f3bf33ca192', 'attached_at': '', 'detached_at': '', 'volume_id': '16dfef5a-9dd9-48d8-b733-f65d801d0391', 'serial': '16dfef5a-9dd9-48d8-b733-f65d801d0391'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1670.518230] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfa123f3-c761-4537-a1da-9b7da6799613 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.536851] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fb3f3a-b138-4890-b39d-bab60c28c193 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.540982] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6026fdeb-512d-4979-95ea-b81559228817 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.579453] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] volume-16dfef5a-9dd9-48d8-b733-f65d801d0391/volume-16dfef5a-9dd9-48d8-b733-f65d801d0391.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1670.579701] env[63371]: DEBUG nova.compute.provider_tree [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1670.581093] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b1a3393-41d1-40be-9be1-c0575d672ff8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.603137] env[63371]: DEBUG oslo_vmware.api [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1670.603137] env[63371]: value = "task-1774448" [ 1670.603137] env[63371]: _type = "Task" [ 1670.603137] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.612629] env[63371]: DEBUG oslo_vmware.api [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774448, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.636048] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774446, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.707608] env[63371]: INFO nova.compute.manager [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Took 22.27 seconds to build instance. 
[ 1670.714552] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-348930b4-0b05-458e-85a1-802c9b44aa01 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.738008] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f865f56e-db56-4b43-b86a-7ac73694364b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.746245] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance 'e1bc4623-f6b5-4440-a58d-594e9cbe3628' progress to 83 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1670.787917] env[63371]: INFO nova.compute.manager [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Rebuilding instance [ 1670.848757] env[63371]: DEBUG nova.compute.manager [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1670.849675] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb5fe95c-56c0-4f5d-91cc-f3f469bf44b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.879331] env[63371]: DEBUG oslo_concurrency.lockutils [None req-933712d0-2034-44d8-b679-b4131608b2d6 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Lock "33952466-3df7-4485-8e7a-ab3d6ec3f22c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.517s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.982521] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774447, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.113350] env[63371]: DEBUG oslo_vmware.api [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774448, 'name': ReconfigVM_Task, 'duration_secs': 0.415517} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.113657] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Reconfigured VM instance instance-0000002f to attach disk [datastore1] volume-16dfef5a-9dd9-48d8-b733-f65d801d0391/volume-16dfef5a-9dd9-48d8-b733-f65d801d0391.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1671.118560] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe94b108-3b11-4b9b-a5bc-317b8ff717a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.138660] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774446, 'name': ReconfigVM_Task, 'duration_secs': 1.021294} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.143023] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Reconfigured VM instance instance-00000050 to attach disk [datastore1] ee3ea0ef-cde9-4326-b564-1aa216e00751/ee3ea0ef-cde9-4326-b564-1aa216e00751.vmdk or device None with type streamOptimized {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1671.143023] env[63371]: DEBUG oslo_vmware.api [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1671.143023] env[63371]: value = "task-1774449" [ 1671.143023] env[63371]: _type = "Task" [ 1671.143023] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.143023] env[63371]: DEBUG nova.scheduler.client.report [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 112 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1671.143023] env[63371]: DEBUG nova.compute.provider_tree [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 112 to 113 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1671.143023] env[63371]: DEBUG nova.compute.provider_tree [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1671.145465] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a61d6b6a-ab98-49bb-ac65-fde44853fff6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.157921] env[63371]: DEBUG oslo_vmware.api [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774449, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.159610] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1671.159610] env[63371]: value = "task-1774450" [ 1671.159610] env[63371]: _type = "Task" [ 1671.159610] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.168810] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774450, 'name': Rename_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.210113] env[63371]: DEBUG oslo_concurrency.lockutils [None req-31ed10db-7bde-4611-a9f1-575dbe07713a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "61a44b0c-86fc-4f1c-a102-61eaff509d20" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.788s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.255022] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1671.255022] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-497f62cd-4632-467c-82fe-c4f1023f6067 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.261495] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1671.261495] env[63371]: value = "task-1774451" [ 1671.261495] env[63371]: _type = "Task" [ 1671.261495] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.272668] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774451, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.364060] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1671.364060] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09c41f28-5f46-4f15-b34e-f796812ad609 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.372067] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1671.372067] env[63371]: value = "task-1774452" [ 1671.372067] env[63371]: _type = "Task" [ 1671.372067] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.381187] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774452, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.487191] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774447, 'name': CreateSnapshot_Task, 'duration_secs': 0.888417} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.487191] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1671.487191] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592ebc99-103e-461b-bc92-7498238f050e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.651022] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.031s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.654753] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.069s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.655232] env[63371]: DEBUG nova.objects.instance [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lazy-loading 'resources' on Instance uuid 40644960-1400-4dc6-9f2b-78afb7492a8d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1671.663227] env[63371]: DEBUG oslo_vmware.api [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774449, 'name': ReconfigVM_Task, 'duration_secs': 0.154966} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.671096] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368419', 'volume_id': '16dfef5a-9dd9-48d8-b733-f65d801d0391', 'name': 'volume-16dfef5a-9dd9-48d8-b733-f65d801d0391', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b523486c-adae-4322-80be-1f3bf33ca192', 'attached_at': '', 'detached_at': '', 'volume_id': '16dfef5a-9dd9-48d8-b733-f65d801d0391', 'serial': '16dfef5a-9dd9-48d8-b733-f65d801d0391'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1671.674094] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "61a44b0c-86fc-4f1c-a102-61eaff509d20" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.678619] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "61a44b0c-86fc-4f1c-a102-61eaff509d20" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.678619] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "61a44b0c-86fc-4f1c-a102-61eaff509d20-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1671.678619] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "61a44b0c-86fc-4f1c-a102-61eaff509d20-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1671.678619] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "61a44b0c-86fc-4f1c-a102-61eaff509d20-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1671.679763] env[63371]: INFO nova.scheduler.client.report [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Deleted allocations for instance 3c6294ae-9a16-4f1e-abd4-1aec224625ac [ 1671.686395] env[63371]: INFO nova.compute.manager [None 
req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Terminating instance [ 1671.694617] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774450, 'name': Rename_Task, 'duration_secs': 0.208583} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.695243] env[63371]: DEBUG nova.compute.manager [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1671.695432] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1671.695712] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1671.696703] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b6594ea-8f90-4a1d-bf81-56eccba3c6e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.699557] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5de33c50-e9e2-4f79-bc78-f0307b108182 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.707309] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1671.708679] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-18f8b118-b59e-43f8-a907-dcd491f0568b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.710294] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1671.710294] env[63371]: value = "task-1774453" [ 1671.710294] env[63371]: _type = "Task" [ 1671.710294] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.715083] env[63371]: DEBUG oslo_vmware.api [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1671.715083] env[63371]: value = "task-1774454" [ 1671.715083] env[63371]: _type = "Task" [ 1671.715083] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.721945] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774453, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.731153] env[63371]: DEBUG oslo_vmware.api [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774454, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.772618] env[63371]: DEBUG oslo_vmware.api [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774451, 'name': PowerOnVM_Task, 'duration_secs': 0.493979} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.772922] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1671.773139] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d540c01d-643b-444a-ab04-5621d9255494 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance 'e1bc4623-f6b5-4440-a58d-594e9cbe3628' progress to 100 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1671.884085] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774452, 'name': PowerOffVM_Task, 'duration_secs': 0.133847} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.884392] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1671.884715] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1671.885480] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b09915d9-977c-426c-8ff7-9d4552fe7556 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.892359] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1671.892602] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea268f5d-299e-4422-b23f-7a1f94be903a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.919348] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1671.919576] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1671.919764] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Deleting the datastore file [datastore1] 33952466-3df7-4485-8e7a-ab3d6ec3f22c {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1671.920094] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f013139-406c-4a12-bc50-03023e99fd80 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.927286] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1671.927286] env[63371]: value = "task-1774456" [ 1671.927286] env[63371]: _type = "Task" [ 1671.927286] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.937027] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774456, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.004554] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1672.004554] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-01b2b476-549e-4da2-83a4-c0350578f848 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.014969] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1672.014969] env[63371]: value = "task-1774457" [ 1672.014969] env[63371]: _type = "Task" [ 1672.014969] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.023507] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774457, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.158263] env[63371]: DEBUG nova.objects.instance [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lazy-loading 'numa_topology' on Instance uuid 40644960-1400-4dc6-9f2b-78afb7492a8d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1672.198654] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4477eb18-eb95-4712-acc5-89198d8cded6 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "3c6294ae-9a16-4f1e-abd4-1aec224625ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.018s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.225683] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774453, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.233697] env[63371]: DEBUG oslo_vmware.api [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774454, 'name': PowerOffVM_Task, 'duration_secs': 0.279523} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.234055] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1672.234276] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1672.234577] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-533de0f9-5411-4e56-bed8-a7bdb7e9f2d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.438667] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774456, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109005} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.438935] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1672.439336] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1672.439554] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1672.528070] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774457, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.559537] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1672.559756] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1672.561201] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleting the datastore file [datastore1] 61a44b0c-86fc-4f1c-a102-61eaff509d20 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1672.561201] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b73ba40-e98f-4fb6-81bb-7353a0448895 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.567379] env[63371]: DEBUG oslo_vmware.api [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1672.567379] env[63371]: value = "task-1774459" [ 1672.567379] env[63371]: _type = "Task" [ 1672.567379] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1672.575682] env[63371]: DEBUG oslo_vmware.api [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774459, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.661288] env[63371]: DEBUG nova.objects.base [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Object Instance<40644960-1400-4dc6-9f2b-78afb7492a8d> lazy-loaded attributes: resources,numa_topology {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1672.722337] env[63371]: DEBUG nova.objects.instance [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'flavor' on Instance uuid b523486c-adae-4322-80be-1f3bf33ca192 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1672.728936] env[63371]: DEBUG oslo_vmware.api [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774453, 'name': PowerOnVM_Task, 'duration_secs': 0.800322} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.729306] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1672.729533] env[63371]: INFO nova.compute.manager [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Took 16.73 seconds to spawn the instance on the hypervisor. [ 1672.729670] env[63371]: DEBUG nova.compute.manager [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1672.730456] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a8cef4-d41b-4884-b835-e6d809e34472 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.031107] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774457, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.034998] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a618e0b6-0294-435e-b731-189276e0efa2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.042760] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9375b904-8129-4261-8f1f-49ef743ef829 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.079258] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82c7aaa-a25e-4999-9d85-73c7c9ae092e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.088640] env[63371]: DEBUG oslo_vmware.api [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154014} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.091011] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1673.094011] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1673.094011] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1673.094011] env[63371]: INFO nova.compute.manager [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Took 1.40 seconds to destroy the instance on the hypervisor. [ 1673.094011] env[63371]: DEBUG oslo.service.loopingcall [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.094011] env[63371]: DEBUG nova.compute.manager [-] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1673.094011] env[63371]: DEBUG nova.network.neutron [-] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1673.096552] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ffe935-eeb4-4a07-9a27-ce658243517a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.112680] env[63371]: DEBUG nova.compute.provider_tree [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1673.235550] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0183ad0-b18a-4aac-898a-2994ed10b199 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.913s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.255981] env[63371]: INFO nova.compute.manager [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 
tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Took 31.11 seconds to build instance. [ 1673.481582] env[63371]: DEBUG nova.virt.hardware [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1673.481838] env[63371]: DEBUG nova.virt.hardware [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1673.482045] env[63371]: DEBUG nova.virt.hardware [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1673.482247] env[63371]: DEBUG nova.virt.hardware [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1673.482397] env[63371]: DEBUG nova.virt.hardware [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1673.482556] env[63371]: DEBUG nova.virt.hardware [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1673.482767] env[63371]: DEBUG nova.virt.hardware [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1673.482942] env[63371]: DEBUG nova.virt.hardware [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1673.483134] env[63371]: DEBUG nova.virt.hardware [None 
req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1673.483304] env[63371]: DEBUG nova.virt.hardware [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1673.483474] env[63371]: DEBUG nova.virt.hardware [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1673.484669] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18285fca-940d-4dcd-b3cc-99d61077a023 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.492778] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5f8cc9c-62ca-4ff9-94c7-de978aa25375 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.507664] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1673.514320] env[63371]: DEBUG oslo.service.loopingcall [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.514320] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1673.514320] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b37c7afc-4313-4794-a2ee-1dc0eb853d3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.538659] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774457, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.540619] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1673.540619] env[63371]: value = "task-1774460" [ 1673.540619] env[63371]: _type = "Task" [ 1673.540619] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.549534] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774460, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.615806] env[63371]: DEBUG nova.scheduler.client.report [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1673.669076] env[63371]: DEBUG nova.compute.manager [req-69bd76ff-b715-497d-b539-4319dd1ec9f7 req-f72076cf-8f12-49ac-aed5-127a078b27a3 service nova] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Received event network-vif-deleted-b10b945f-6d31-4c0a-8698-336c1a98a865 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1673.669076] env[63371]: INFO nova.compute.manager [req-69bd76ff-b715-497d-b539-4319dd1ec9f7 req-f72076cf-8f12-49ac-aed5-127a078b27a3 service nova] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Neutron deleted interface b10b945f-6d31-4c0a-8698-336c1a98a865; detaching it from the instance and deleting it from the info cache [ 1673.669367] env[63371]: DEBUG nova.network.neutron [req-69bd76ff-b715-497d-b539-4319dd1ec9f7 req-f72076cf-8f12-49ac-aed5-127a078b27a3 service nova] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.722469] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cd42df89-7876-4001-ae38-4debe44d7edf tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "b523486c-adae-4322-80be-1f3bf33ca192" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.722745] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cd42df89-7876-4001-ae38-4debe44d7edf tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.722917] env[63371]: DEBUG nova.compute.manager [None req-cd42df89-7876-4001-ae38-4debe44d7edf tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1673.724798] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17a31a3f-b258-4913-ac56-79948383328d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.730908] env[63371]: DEBUG nova.compute.manager [None req-cd42df89-7876-4001-ae38-4debe44d7edf tempest-AttachVolumeTestJSON-810026873 
tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63371) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1673.731598] env[63371]: DEBUG nova.objects.instance [None req-cd42df89-7876-4001-ae38-4debe44d7edf tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'flavor' on Instance uuid b523486c-adae-4322-80be-1f3bf33ca192 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1673.757921] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a032f19e-d50e-45bc-a942-f7396757c3d8 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "ee3ea0ef-cde9-4326-b564-1aa216e00751" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.620s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.952877] env[63371]: DEBUG nova.network.neutron [-] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1674.051409] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774457, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.065731] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774460, 'name': CreateVM_Task, 'duration_secs': 0.444652} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.065731] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1674.066100] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.066100] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.066430] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1674.067601] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-856aa67f-0b79-4d41-9c73-2313edee6bf5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.072925] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1674.072925] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5241f18f-fa93-b820-d223-16153b5b22a6" [ 1674.072925] env[63371]: _type = "Task" [ 1674.072925] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.083211] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5241f18f-fa93-b820-d223-16153b5b22a6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.122837] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.467s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.124957] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.705s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.125047] env[63371]: DEBUG nova.objects.instance [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Lazy-loading 'resources' on Instance uuid 36d5c00a-4762-4801-aff1-0a22e336730a {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1674.174312] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dc810bd1-f1f0-4fd6-880c-5a18ca57313e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.185350] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0704d5a-6ed1-40d6-9540-41495ca4c9f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.225152] env[63371]: DEBUG nova.compute.manager [req-69bd76ff-b715-497d-b539-4319dd1ec9f7 req-f72076cf-8f12-49ac-aed5-127a078b27a3 service nova] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Detach interface failed, port_id=b10b945f-6d31-4c0a-8698-336c1a98a865, reason: Instance 61a44b0c-86fc-4f1c-a102-61eaff509d20 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1674.240723] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd42df89-7876-4001-ae38-4debe44d7edf tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1674.240723] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56e76c51-b8fe-4d62-91db-08ee77e0056f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.248277] env[63371]: DEBUG oslo_vmware.api [None req-cd42df89-7876-4001-ae38-4debe44d7edf tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1674.248277] env[63371]: value = "task-1774461" [ 1674.248277] env[63371]: _type = "Task" [ 1674.248277] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.260552] env[63371]: DEBUG oslo_vmware.api [None req-cd42df89-7876-4001-ae38-4debe44d7edf tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774461, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.456510] env[63371]: INFO nova.compute.manager [-] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Took 1.36 seconds to deallocate network for instance. [ 1674.481795] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "ee3ea0ef-cde9-4326-b564-1aa216e00751" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.482122] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "ee3ea0ef-cde9-4326-b564-1aa216e00751" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.482435] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "ee3ea0ef-cde9-4326-b564-1aa216e00751-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.482536] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "ee3ea0ef-cde9-4326-b564-1aa216e00751-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.482697] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "ee3ea0ef-cde9-4326-b564-1aa216e00751-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.487156] env[63371]: INFO nova.compute.manager [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Terminating instance [ 1674.487979] env[63371]: DEBUG nova.compute.manager [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1674.488188] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1674.489307] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681181fb-3c18-42a6-aaf5-b2ae0e690e6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.501012] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1674.501231] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0d8bcfe7-24d7-43c7-9926-be27b2541081 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.513654] env[63371]: DEBUG oslo_vmware.api [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1674.513654] env[63371]: value = "task-1774462" [ 1674.513654] env[63371]: _type = "Task" [ 1674.513654] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.524387] env[63371]: DEBUG oslo_vmware.api [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774462, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.543034] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.545780] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.003s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.545994] env[63371]: DEBUG nova.compute.manager [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Going to confirm migration 3 {{(pid=63371) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1674.547707] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774457, 'name': CloneVM_Task, 'duration_secs': 2.183511} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.547707] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Created linked-clone VM from snapshot [ 1674.548549] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52d6ca5-553d-4683-a169-0c2aa58e31b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.558418] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Uploading image 92bc8616-7d1c-4553-a5b6-e579f1683538 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1674.588340] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5241f18f-fa93-b820-d223-16153b5b22a6, 'name': SearchDatastore_Task, 'duration_secs': 0.009429} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.590771] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.590926] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1674.591050] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.591199] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.592393] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1674.592393] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef4681c3-a0c0-42b2-8c59-ceeed200b742 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.600447] env[63371]: DEBUG oslo_vmware.rw_handles [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1674.600447] env[63371]: value = "vm-368424" [ 1674.600447] env[63371]: _type = "VirtualMachine" [ 1674.600447] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1674.600728] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-891e2607-1389-4fbb-861e-b3621ea02003 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.603428] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1674.603606] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1674.604969] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-005fe937-d78a-4bb8-8258-61ce2c76dee9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.610723] env[63371]: DEBUG oslo_vmware.rw_handles [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lease: (returnval){ [ 1674.610723] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524ac642-c999-bb27-6cd6-7ef2cbaaaabb" [ 1674.610723] env[63371]: _type = "HttpNfcLease" [ 1674.610723] env[63371]: } obtained for exporting VM: (result){ [ 1674.610723] env[63371]: value = "vm-368424" [ 1674.610723] env[63371]: _type = "VirtualMachine" [ 1674.610723] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1674.610723] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the lease: (returnval){ [ 1674.610723] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524ac642-c999-bb27-6cd6-7ef2cbaaaabb" [ 1674.610723] env[63371]: _type = "HttpNfcLease" [ 1674.610723] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1674.611975] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1674.611975] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ada276-449e-7fc1-a241-65394621a825" [ 1674.611975] env[63371]: _type = "Task" [ 1674.611975] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.636991] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1674.636991] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524ac642-c999-bb27-6cd6-7ef2cbaaaabb" [ 1674.636991] env[63371]: _type = "HttpNfcLease" [ 1674.636991] env[63371]: } is ready. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1674.641210] env[63371]: DEBUG oslo_vmware.rw_handles [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1674.641210] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524ac642-c999-bb27-6cd6-7ef2cbaaaabb" [ 1674.641210] env[63371]: _type = "HttpNfcLease" [ 1674.641210] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1674.641806] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ada276-449e-7fc1-a241-65394621a825, 'name': SearchDatastore_Task, 'duration_secs': 0.009658} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.642317] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ad3963c9-502b-447f-8162-e9b2b8974449 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "40644960-1400-4dc6-9f2b-78afb7492a8d" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 33.606s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.643793] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aac174db-72ad-45a2-aacd-bcf7890029cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.648253] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5615c6aa-1ecf-4969-a59e-98b019fb79dd tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "40644960-1400-4dc6-9f2b-78afb7492a8d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 13.228s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.648253] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5615c6aa-1ecf-4969-a59e-98b019fb79dd tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "40644960-1400-4dc6-9f2b-78afb7492a8d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.648383] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5615c6aa-1ecf-4969-a59e-98b019fb79dd tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "40644960-1400-4dc6-9f2b-78afb7492a8d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.648491] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5615c6aa-1ecf-4969-a59e-98b019fb79dd tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "40644960-1400-4dc6-9f2b-78afb7492a8d-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.650322] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2eeb067c-2bee-4ee5-b21b-02e1bfe93e81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.654644] env[63371]: INFO nova.compute.manager [None req-5615c6aa-1ecf-4969-a59e-98b019fb79dd tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Terminating instance [ 1674.656660] env[63371]: DEBUG nova.compute.manager [None req-5615c6aa-1ecf-4969-a59e-98b019fb79dd tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1674.656660] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5615c6aa-1ecf-4969-a59e-98b019fb79dd tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1674.656660] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d954255e-c4dd-4fbf-8a2e-5c3a5ec50933 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.662844] env[63371]: DEBUG oslo_vmware.rw_handles [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525f9c5b-144f-a59d-efff-83ed1d8b4763/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1674.663520] env[63371]: DEBUG oslo_vmware.rw_handles [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525f9c5b-144f-a59d-efff-83ed1d8b4763/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1674.664647] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1674.664647] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52094084-aa5f-7c8b-96b0-47e1ee0c90ac" [ 1674.664647] env[63371]: _type = "Task" [ 1674.664647] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.732568] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9933d506-a258-4fbc-a864-3e29db6b79f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.753597] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52094084-aa5f-7c8b-96b0-47e1ee0c90ac, 'name': SearchDatastore_Task, 'duration_secs': 0.009991} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.757687] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.758040] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 33952466-3df7-4485-8e7a-ab3d6ec3f22c/33952466-3df7-4485-8e7a-ab3d6ec3f22c.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1674.758273] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-759cfcb7-59b2-4638-9471-4f152ecee357 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.765659] env[63371]: DEBUG oslo_vmware.api [None req-cd42df89-7876-4001-ae38-4debe44d7edf tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774461, 'name': PowerOffVM_Task, 'duration_secs': 0.194283} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.767402] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd42df89-7876-4001-ae38-4debe44d7edf tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1674.767402] env[63371]: DEBUG nova.compute.manager [None req-cd42df89-7876-4001-ae38-4debe44d7edf tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1674.767723] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1674.767723] env[63371]: value = "task-1774464" [ 1674.767723] env[63371]: _type = "Task" [ 1674.767723] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.785159] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6797da2-4d54-42f7-9c84-851fd1ad502b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.788675] env[63371]: WARNING nova.virt.vmwareapi.vmops [None req-5615c6aa-1ecf-4969-a59e-98b019fb79dd tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 40644960-1400-4dc6-9f2b-78afb7492a8d could not be found. [ 1674.788891] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5615c6aa-1ecf-4969-a59e-98b019fb79dd tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1674.789142] env[63371]: INFO nova.compute.manager [None req-5615c6aa-1ecf-4969-a59e-98b019fb79dd tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Took 0.13 seconds to destroy the instance on the hypervisor. [ 1674.789454] env[63371]: DEBUG oslo.service.loopingcall [None req-5615c6aa-1ecf-4969-a59e-98b019fb79dd tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1674.795477] env[63371]: DEBUG nova.compute.manager [-] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1674.795642] env[63371]: DEBUG nova.network.neutron [-] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1674.798437] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1c397aae-49b6-468a-a421-2029a1869d7b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.807997] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774464, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.968363] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.027292] env[63371]: DEBUG oslo_vmware.api [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774462, 'name': PowerOffVM_Task, 'duration_secs': 0.242348} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.027629] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1675.027799] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1675.028075] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0200be9d-1d98-49cc-9138-ec49cbcd5783 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.168055] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1675.168343] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1675.168528] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleting the datastore file [datastore1] ee3ea0ef-cde9-4326-b564-1aa216e00751 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1675.168812] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a03021d3-0bf1-4d76-9a34-096a0b8c6c5c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.178695] env[63371]: DEBUG oslo_vmware.api [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1675.178695] env[63371]: value = "task-1774466" [ 1675.178695] env[63371]: _type = "Task" [ 1675.178695] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.190965] env[63371]: DEBUG oslo_vmware.api [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774466, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.244268] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b32a2db-162d-40ab-962b-ad07c09cd923 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.257703] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e66d7d9-b7ee-495b-9ae3-50f1e9d27480 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.305594] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e589cc-5f07-42ef-9e6c-c6a01314d78a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.314631] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cd42df89-7876-4001-ae38-4debe44d7edf tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.591s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.324742] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774464, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521587} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.326766] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1721466-d36f-4925-8c6e-4aaed8f04d07 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.329685] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 33952466-3df7-4485-8e7a-ab3d6ec3f22c/33952466-3df7-4485-8e7a-ab3d6ec3f22c.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1675.330013] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1675.331379] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a453041b-17a9-411e-be10-ada36d46ab0e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.345601] env[63371]: DEBUG nova.compute.provider_tree [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1675.348482] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1675.348482] env[63371]: value = "task-1774467" [ 1675.348482] env[63371]: _type = "Task" [ 1675.348482] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.358243] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774467, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.415337] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1675.415417] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.415556] env[63371]: DEBUG nova.network.neutron [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1675.415747] env[63371]: DEBUG nova.objects.instance [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lazy-loading 'info_cache' on Instance uuid e1bc4623-f6b5-4440-a58d-594e9cbe3628 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1675.434569] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.435505] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.435505] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1675.435505] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.435505] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 
tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.437614] env[63371]: INFO nova.compute.manager [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Terminating instance [ 1675.444208] env[63371]: DEBUG nova.compute.manager [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1675.444422] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1675.445428] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e7563ca-ee71-4dfe-89e5-c29de9abf5ab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.454465] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1675.455521] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0640909f-2426-4fa6-8465-c789b608852c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.463504] env[63371]: DEBUG oslo_vmware.api [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1675.463504] env[63371]: value = "task-1774468" [ 1675.463504] env[63371]: _type = "Task" [ 1675.463504] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.472757] env[63371]: DEBUG oslo_vmware.api [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774468, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.691381] env[63371]: DEBUG oslo_vmware.api [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774466, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.322788} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.691785] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1675.692121] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1675.692477] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1675.692702] env[63371]: INFO nova.compute.manager [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1675.692944] env[63371]: DEBUG oslo.service.loopingcall [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1675.693197] env[63371]: DEBUG nova.compute.manager [-] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1675.694822] env[63371]: DEBUG nova.network.neutron [-] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1675.811030] env[63371]: DEBUG nova.network.neutron [-] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.850541] env[63371]: DEBUG nova.scheduler.client.report [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1675.867462] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774467, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105765} 
completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.867776] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1675.869069] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a30ec5f-db77-41cf-a00f-435800a2b416 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.900148] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 33952466-3df7-4485-8e7a-ab3d6ec3f22c/33952466-3df7-4485-8e7a-ab3d6ec3f22c.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1675.900148] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1808e243-bee0-488d-91fb-1fde0b002649 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.921992] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1675.921992] env[63371]: value = "task-1774469" [ 1675.921992] env[63371]: _type = "Task" [ 1675.921992] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.932130] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774469, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.975031] env[63371]: DEBUG oslo_vmware.api [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774468, 'name': PowerOffVM_Task, 'duration_secs': 0.302131} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.975411] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1675.975840] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1675.976100] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b5ebc25-3d02-4381-8ba5-f44159683a0c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.159422] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1676.160124] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1676.160124] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Deleting the datastore file [datastore1] fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1676.161194] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3161d8b0-b8a4-44ae-9c55-83a7adbe1b26 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.168459] env[63371]: DEBUG oslo_vmware.api [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for the task: (returnval){ [ 1676.168459] env[63371]: value = "task-1774471" [ 1676.168459] env[63371]: _type = "Task" [ 1676.168459] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.181250] env[63371]: DEBUG oslo_vmware.api [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774471, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.264427] env[63371]: DEBUG nova.compute.manager [req-161a5e3d-0720-429f-a739-84481eb2b591 req-19fba267-4609-4a82-ab74-690c90eed1b0 service nova] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Received event network-vif-deleted-d78faddd-f19f-47ac-bee0-dd2e71ad5e42 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1676.264784] env[63371]: INFO nova.compute.manager [req-161a5e3d-0720-429f-a739-84481eb2b591 req-19fba267-4609-4a82-ab74-690c90eed1b0 service nova] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Neutron deleted interface d78faddd-f19f-47ac-bee0-dd2e71ad5e42; detaching it from the instance and deleting it from the info cache [ 1676.265236] env[63371]: DEBUG nova.network.neutron [req-161a5e3d-0720-429f-a739-84481eb2b591 req-19fba267-4609-4a82-ab74-690c90eed1b0 service nova] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.314929] env[63371]: INFO nova.compute.manager [-] [instance: 40644960-1400-4dc6-9f2b-78afb7492a8d] Took 1.52 seconds to deallocate network for instance. [ 1676.362084] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.237s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.364961] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.989s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.366803] env[63371]: INFO nova.compute.claims [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1676.396040] env[63371]: INFO nova.scheduler.client.report [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Deleted allocations for instance 36d5c00a-4762-4801-aff1-0a22e336730a [ 1676.440976] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774469, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.484997] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquiring lock "3da99cec-409f-4ea0-891c-2e9d7429674d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1676.485317] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Lock "3da99cec-409f-4ea0-891c-2e9d7429674d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.638404] env[63371]: DEBUG nova.network.neutron [-] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.683867] env[63371]: DEBUG oslo_vmware.api [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Task: {'id': task-1774471, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.335972} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.684281] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1676.684483] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1676.684762] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1676.684992] env[63371]: INFO nova.compute.manager [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1676.685390] env[63371]: DEBUG oslo.service.loopingcall [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1676.685687] env[63371]: DEBUG nova.compute.manager [-] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1676.685844] env[63371]: DEBUG nova.network.neutron [-] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1676.739568] env[63371]: DEBUG nova.objects.instance [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'flavor' on Instance uuid b523486c-adae-4322-80be-1f3bf33ca192 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1676.758118] env[63371]: DEBUG nova.network.neutron [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance_info_cache with network_info: [{"id": "826bbbf2-7d7e-47d0-9516-4cb91c3d94a7", "address": "fa:16:3e:67:7c:99", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap826bbbf2-7d", "ovs_interfaceid": "826bbbf2-7d7e-47d0-9516-4cb91c3d94a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.768354] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1c10622-ec15-430b-8817-678ddf7b5c95 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.781269] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0cc0d3-6359-4b6d-8eac-607f087ef5ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.828039] env[63371]: DEBUG nova.compute.manager [req-161a5e3d-0720-429f-a739-84481eb2b591 req-19fba267-4609-4a82-ab74-690c90eed1b0 service nova] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Detach interface failed, port_id=d78faddd-f19f-47ac-bee0-dd2e71ad5e42, reason: Instance ee3ea0ef-cde9-4326-b564-1aa216e00751 could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1676.909899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2ecfc52a-912b-4d74-b4a5-b6c016e2da6b tempest-ServerMetadataNegativeTestJSON-698816723 tempest-ServerMetadataNegativeTestJSON-698816723-project-member] Lock "36d5c00a-4762-4801-aff1-0a22e336730a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.488s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.946990] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774469, 'name': ReconfigVM_Task, 'duration_secs': 0.661013} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.947443] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 33952466-3df7-4485-8e7a-ab3d6ec3f22c/33952466-3df7-4485-8e7a-ab3d6ec3f22c.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1676.948419] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f452755c-51e5-4f9f-bfc6-dac871590ae8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.955307] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1676.955307] env[63371]: value = "task-1774472" [ 1676.955307] env[63371]: _type = "Task" [ 1676.955307] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.965774] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774472, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.989159] env[63371]: DEBUG nova.compute.manager [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1677.141288] env[63371]: INFO nova.compute.manager [-] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Took 1.45 seconds to deallocate network for instance. 
[ 1677.253042] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1677.253042] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.253042] env[63371]: DEBUG nova.network.neutron [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1677.253042] env[63371]: DEBUG nova.objects.instance [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'info_cache' on Instance uuid b523486c-adae-4322-80be-1f3bf33ca192 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1677.262202] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "refresh_cache-e1bc4623-f6b5-4440-a58d-594e9cbe3628" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.262202] env[63371]: DEBUG nova.objects.instance [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lazy-loading 'migration_context' on Instance uuid e1bc4623-f6b5-4440-a58d-594e9cbe3628 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1677.351693] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5615c6aa-1ecf-4969-a59e-98b019fb79dd tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "40644960-1400-4dc6-9f2b-78afb7492a8d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.704s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.471270] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774472, 'name': Rename_Task, 'duration_secs': 0.189222} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.471742] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1677.472228] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78add094-d9b1-455b-9638-53d6d13de7e4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.480130] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1677.480130] env[63371]: value = "task-1774473" [ 1677.480130] env[63371]: _type = "Task" [ 1677.480130] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.491995] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774473, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.521641] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.580768] env[63371]: DEBUG nova.network.neutron [-] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1677.647893] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.719325] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20e6f83-8871-4a43-9d2c-e3f286ab3a9c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.730842] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4c9e22-512b-44ed-8e41-65540d3a5b30 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.768545] env[63371]: DEBUG nova.objects.base [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1677.770125] env[63371]: DEBUG nova.objects.base [None req-8d695311-aabb-4bad-9d01-10134b17167e 
tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1677.771507] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c4668f-3137-45a4-b78b-d43d32656ae2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.775354] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f2e6e44-3509-46e1-afc0-b1e760ad59be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.798052] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-314f7c19-7784-43e7-8d6d-f540b122b3df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.802180] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d025d47c-6c02-4c6a-9271-85b9fe1439b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.816218] env[63371]: DEBUG nova.compute.provider_tree [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1677.818900] env[63371]: DEBUG oslo_vmware.api [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1677.818900] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5202311e-4954-0258-7f14-dd29ddd7db74" [ 1677.818900] env[63371]: _type = "Task" [ 1677.818900] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.829170] env[63371]: DEBUG oslo_vmware.api [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5202311e-4954-0258-7f14-dd29ddd7db74, 'name': SearchDatastore_Task, 'duration_secs': 0.007781} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.829531] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1677.994171] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774473, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.083203] env[63371]: INFO nova.compute.manager [-] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Took 1.40 seconds to deallocate network for instance. [ 1678.094970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.095294] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.166327] env[63371]: DEBUG nova.compute.manager [req-2453c530-b0fe-4eee-b230-f9deb57b2ac4 req-9f9ac405-a3f7-4eb8-83e5-8ef733ad071e service nova] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Received event network-vif-deleted-df0bcb02-c7dd-42fe-96f3-e45841fdf782 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1678.323051] env[63371]: DEBUG nova.scheduler.client.report [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1678.493463] env[63371]: DEBUG oslo_vmware.api [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774473, 'name': PowerOnVM_Task, 'duration_secs': 0.91077} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.493747] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1678.493943] env[63371]: DEBUG nova.compute.manager [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1678.494766] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53740e7-c48c-4a53-8685-3df4fa2ea6f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.596910] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1678.597312] env[63371]: DEBUG nova.compute.manager [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1678.827359] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.827359] env[63371]: DEBUG nova.compute.manager [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1678.831788] env[63371]: DEBUG nova.network.neutron [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updating instance_info_cache with network_info: [{"id": "993ff886-27f6-48cd-be00-f0e8d292b060", "address": "fa:16:3e:14:89:81", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993ff886-27", "ovs_interfaceid": "993ff886-27f6-48cd-be00-f0e8d292b060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1678.832782] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.865s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1678.833052] env[63371]: DEBUG nova.objects.instance [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lazy-loading 'resources' on Instance uuid 61a44b0c-86fc-4f1c-a102-61eaff509d20 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1679.014893] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.132830] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.339467] env[63371]: DEBUG nova.compute.utils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Using /dev/sd instead of None {{(pid=63371) 
get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1679.341162] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Releasing lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.345858] env[63371]: DEBUG nova.compute.manager [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1679.346471] env[63371]: DEBUG nova.network.neutron [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1679.474085] env[63371]: DEBUG nova.policy [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cfea05a674d48609376c79b8cce2f19', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4fc21e96c1ad4c00984b1d6b832af490', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1679.698839] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5997dc-5ab5-442a-aec5-682d2defb6fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.707378] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3212a11-4fdf-4a41-8ae4-0a7efc211fb2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.743365] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66501ce-e027-463d-a7fe-7a0bc44a3dba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.753957] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae583a2-32a0-49ce-bd3f-43426e6544f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.767977] env[63371]: DEBUG nova.compute.provider_tree [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1679.831434] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 
tempest-ServerShowV254Test-2026313346-project-member] Acquiring lock "33952466-3df7-4485-8e7a-ab3d6ec3f22c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.834029] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Lock "33952466-3df7-4485-8e7a-ab3d6ec3f22c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.834029] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquiring lock "33952466-3df7-4485-8e7a-ab3d6ec3f22c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.834029] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Lock "33952466-3df7-4485-8e7a-ab3d6ec3f22c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.834029] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Lock "33952466-3df7-4485-8e7a-ab3d6ec3f22c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.835101] env[63371]: INFO nova.compute.manager [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Terminating instance [ 1679.836692] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquiring lock "refresh_cache-33952466-3df7-4485-8e7a-ab3d6ec3f22c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.836962] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquired lock "refresh_cache-33952466-3df7-4485-8e7a-ab3d6ec3f22c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.837041] env[63371]: DEBUG nova.network.neutron [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1679.845974] env[63371]: DEBUG 
nova.compute.manager [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1679.852431] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1679.854342] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e5c6c42-5528-4b49-94b7-4d192d8a6f36 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.862382] env[63371]: DEBUG oslo_vmware.api [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1679.862382] env[63371]: value = "task-1774474" [ 1679.862382] env[63371]: _type = "Task" [ 1679.862382] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.869806] env[63371]: DEBUG oslo_vmware.api [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774474, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.149963] env[63371]: DEBUG nova.network.neutron [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Successfully created port: 18a0dce6-a0d5-44e7-85a3-d54e70aa89b3 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1680.271649] env[63371]: DEBUG nova.scheduler.client.report [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1680.373045] env[63371]: DEBUG nova.network.neutron [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1680.383122] env[63371]: DEBUG oslo_vmware.api [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774474, 'name': PowerOnVM_Task, 'duration_secs': 0.475328} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.383122] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1680.383122] env[63371]: DEBUG nova.compute.manager [None req-2465ca92-24d9-4e4a-9dda-d15d18af6edb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1680.384750] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f5bff6a-9da9-4db4-9d75-a3025787924a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.481706] env[63371]: DEBUG nova.network.neutron [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.780188] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.946s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.785095] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.261s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1680.785810] env[63371]: INFO nova.compute.claims [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1680.825762] env[63371]: INFO nova.scheduler.client.report [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted allocations for instance 61a44b0c-86fc-4f1c-a102-61eaff509d20 [ 1680.865751] env[63371]: DEBUG nova.compute.manager [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1680.922294] env[63371]: DEBUG nova.virt.hardware [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1680.922784] env[63371]: DEBUG nova.virt.hardware [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1680.923063] env[63371]: DEBUG nova.virt.hardware [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1680.923344] env[63371]: DEBUG nova.virt.hardware [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1680.923552] env[63371]: DEBUG nova.virt.hardware [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1680.923761] env[63371]: DEBUG nova.virt.hardware [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1680.924069] env[63371]: DEBUG nova.virt.hardware [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1680.924379] env[63371]: DEBUG nova.virt.hardware [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1680.924589] env[63371]: DEBUG nova.virt.hardware [None 
req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1680.924824] env[63371]: DEBUG nova.virt.hardware [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1680.925137] env[63371]: DEBUG nova.virt.hardware [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1680.926458] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e7a7ac-76de-44ec-95c3-9763dbcbdb18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.936281] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdd37f37-da09-48d1-bbb2-1c80ea739414 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.988499] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Releasing lock "refresh_cache-33952466-3df7-4485-8e7a-ab3d6ec3f22c" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.988925] env[63371]: DEBUG nova.compute.manager [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1680.989145] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1680.990073] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb70270c-6dae-414a-8b33-e2967eea050a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.999177] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1680.999660] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-062fb569-b433-451e-baae-d48141d0e0bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.006196] env[63371]: DEBUG oslo_vmware.api [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1681.006196] env[63371]: value = "task-1774475" [ 1681.006196] env[63371]: _type = "Task" [ 1681.006196] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.018716] env[63371]: DEBUG oslo_vmware.api [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774475, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.337783] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f7c738ed-110e-41cb-82cb-c072a6aa5584 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "61a44b0c-86fc-4f1c-a102-61eaff509d20" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.663s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.517485] env[63371]: DEBUG oslo_vmware.api [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774475, 'name': PowerOffVM_Task, 'duration_secs': 0.272302} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.517827] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1681.518045] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1681.518335] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e0d05a3-839d-4ced-9e5e-326285f4dd28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.547991] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1681.547991] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1681.547991] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Deleting the datastore file [datastore1] 33952466-3df7-4485-8e7a-ab3d6ec3f22c {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1681.552977] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d785033d-d97e-4b4c-98b2-943aba813f79 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.556312] env[63371]: DEBUG oslo_vmware.api [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for the task: (returnval){ [ 1681.556312] env[63371]: value = "task-1774477" [ 1681.556312] env[63371]: _type = "Task" [ 1681.556312] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.565097] env[63371]: DEBUG oslo_vmware.api [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774477, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.064660] env[63371]: DEBUG nova.compute.manager [req-3dc9e26e-73a1-4089-8df4-179854317197 req-b872cf55-86d8-4b62-9085-310b17c7c8f9 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Received event network-vif-plugged-18a0dce6-a0d5-44e7-85a3-d54e70aa89b3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1682.064660] env[63371]: DEBUG oslo_concurrency.lockutils [req-3dc9e26e-73a1-4089-8df4-179854317197 req-b872cf55-86d8-4b62-9085-310b17c7c8f9 service nova] Acquiring lock "c04edf6d-8a07-4776-be0f-b763fb3059d2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.064660] env[63371]: DEBUG oslo_concurrency.lockutils [req-3dc9e26e-73a1-4089-8df4-179854317197 req-b872cf55-86d8-4b62-9085-310b17c7c8f9 service nova] Lock "c04edf6d-8a07-4776-be0f-b763fb3059d2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.064660] env[63371]: DEBUG oslo_concurrency.lockutils [req-3dc9e26e-73a1-4089-8df4-179854317197 req-b872cf55-86d8-4b62-9085-310b17c7c8f9 service nova] Lock "c04edf6d-8a07-4776-be0f-b763fb3059d2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.064660] env[63371]: DEBUG nova.compute.manager [req-3dc9e26e-73a1-4089-8df4-179854317197 req-b872cf55-86d8-4b62-9085-310b17c7c8f9 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] No waiting events found dispatching network-vif-plugged-18a0dce6-a0d5-44e7-85a3-d54e70aa89b3 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1682.068549] env[63371]: WARNING nova.compute.manager [req-3dc9e26e-73a1-4089-8df4-179854317197 req-b872cf55-86d8-4b62-9085-310b17c7c8f9 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Received unexpected event network-vif-plugged-18a0dce6-a0d5-44e7-85a3-d54e70aa89b3 for instance with vm_state building and task_state spawning. [ 1682.074452] env[63371]: DEBUG nova.network.neutron [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Successfully updated port: 18a0dce6-a0d5-44e7-85a3-d54e70aa89b3 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1682.079739] env[63371]: DEBUG oslo_vmware.api [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Task: {'id': task-1774477, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.235123} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.079981] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1682.080193] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1682.080404] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1682.080727] env[63371]: INFO nova.compute.manager [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1682.081285] env[63371]: DEBUG oslo.service.loopingcall [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1682.081906] env[63371]: DEBUG nova.compute.manager [-] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1682.082056] env[63371]: DEBUG nova.network.neutron [-] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1682.110258] env[63371]: DEBUG nova.network.neutron [-] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1682.159993] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0783e2d5-4c62-4639-828b-d61d02ec7fb7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.168760] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4eff7d-2dc5-4992-9d0d-0a6d3761211c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.199614] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57fb3db8-a778-4e77-9bb4-c80407f4bc43 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.208778] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e71e797-3eab-4250-bdd9-016ae9b6aca0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.224908] env[63371]: DEBUG nova.compute.provider_tree [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1682.449395] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "cf63c2a2-ee72-464e-944d-5e53ca8635ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.449644] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "cf63c2a2-ee72-464e-944d-5e53ca8635ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.478127] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Acquiring lock "6f31d6ad-480d-40dd-924e-f6277d93c99a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.478359] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Lock "6f31d6ad-480d-40dd-924e-f6277d93c99a" acquired 
by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.577538] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Acquiring lock "refresh_cache-c04edf6d-8a07-4776-be0f-b763fb3059d2" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1682.577697] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Acquired lock "refresh_cache-c04edf6d-8a07-4776-be0f-b763fb3059d2" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1682.578053] env[63371]: DEBUG nova.network.neutron [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1682.614034] env[63371]: DEBUG nova.network.neutron [-] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1682.753037] env[63371]: ERROR nova.scheduler.client.report [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [req-1c37e7c3-e8c1-48c0-84c5-e452279aa9b9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1c37e7c3-e8c1-48c0-84c5-e452279aa9b9"}]} [ 1682.770617] env[63371]: DEBUG nova.scheduler.client.report [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1682.785295] env[63371]: DEBUG nova.scheduler.client.report [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1682.785553] env[63371]: DEBUG nova.compute.provider_tree [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1682.797636] env[63371]: DEBUG nova.scheduler.client.report [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1682.817120] env[63371]: DEBUG nova.scheduler.client.report [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1682.952270] env[63371]: DEBUG nova.compute.manager [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1682.980430] env[63371]: DEBUG nova.compute.manager [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1683.117217] env[63371]: INFO nova.compute.manager [-] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Took 1.04 seconds to deallocate network for instance. [ 1683.119766] env[63371]: DEBUG nova.network.neutron [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1683.158396] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfe58db-26be-4229-a2f7-33976e35f731 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.168355] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1955b362-bf4f-4e50-b476-54b812ac8d73 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.205634] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ed5cce3-be18-4ead-af32-e9abf67ae759 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.215100] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99f2f454-1b3f-43b5-ba7f-ad0287c1d86b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.229681] env[63371]: DEBUG nova.compute.provider_tree [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1683.368977] env[63371]: DEBUG nova.network.neutron [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Updating instance_info_cache with network_info: [{"id": "18a0dce6-a0d5-44e7-85a3-d54e70aa89b3", "address": "fa:16:3e:ec:10:4a", "network": {"id": "84ced2d0-442c-416c-9d38-8d6606bd9f77", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1880659540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": 
[]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fc21e96c1ad4c00984b1d6b832af490", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18a0dce6-a0", "ovs_interfaceid": "18a0dce6-a0d5-44e7-85a3-d54e70aa89b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1683.478940] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.501844] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.629248] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1683.766736] env[63371]: DEBUG nova.scheduler.client.report [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 117 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1683.767238] env[63371]: DEBUG nova.compute.provider_tree [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 117 to 118 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1683.767776] env[63371]: DEBUG nova.compute.provider_tree [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1683.871381] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Releasing lock "refresh_cache-c04edf6d-8a07-4776-be0f-b763fb3059d2" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1683.871723] env[63371]: DEBUG nova.compute.manager [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Instance network_info: |[{"id": "18a0dce6-a0d5-44e7-85a3-d54e70aa89b3", "address": "fa:16:3e:ec:10:4a", "network": {"id": "84ced2d0-442c-416c-9d38-8d6606bd9f77", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1880659540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fc21e96c1ad4c00984b1d6b832af490", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18a0dce6-a0", "ovs_interfaceid": "18a0dce6-a0d5-44e7-85a3-d54e70aa89b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1683.872295] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:10:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b83383f-ed7a-4efd-aef7-aa8c15649d07', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18a0dce6-a0d5-44e7-85a3-d54e70aa89b3', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1683.881407] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Creating folder: Project (4fc21e96c1ad4c00984b1d6b832af490). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1683.883811] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-193c1682-8ce4-4a9e-ad0d-eb7ffe89c185 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.891996] env[63371]: DEBUG oslo_vmware.rw_handles [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525f9c5b-144f-a59d-efff-83ed1d8b4763/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1683.892942] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc78b2c-3777-4660-abc8-9e1e07121be0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.897071] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Created folder: Project (4fc21e96c1ad4c00984b1d6b832af490) in parent group-v368199. [ 1683.897173] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Creating folder: Instances. Parent ref: group-v368426. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1683.897754] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3adfbac9-289a-439e-965e-402864a4bc35 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.901230] env[63371]: DEBUG oslo_vmware.rw_handles [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525f9c5b-144f-a59d-efff-83ed1d8b4763/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1683.901394] env[63371]: ERROR oslo_vmware.rw_handles [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525f9c5b-144f-a59d-efff-83ed1d8b4763/disk-0.vmdk due to incomplete transfer. [ 1683.901606] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-e9d7b801-2b17-4e02-82d3-3c0e1deb5d3f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.908585] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Created folder: Instances in parent group-v368426. [ 1683.909270] env[63371]: DEBUG oslo.service.loopingcall [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.909270] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1683.909422] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-662fda4d-b3d1-48f6-a9e0-cac3eb2ceb59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.930319] env[63371]: DEBUG oslo_vmware.rw_handles [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525f9c5b-144f-a59d-efff-83ed1d8b4763/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1683.931197] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Uploaded image 92bc8616-7d1c-4553-a5b6-e579f1683538 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1683.932705] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1683.933379] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-69a611fa-e17a-40da-9b0e-a599ab4db846 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.939351] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1683.939351] env[63371]: value = "task-1774480" [ 1683.939351] env[63371]: _type = "Task" [ 1683.939351] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.944009] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1683.944009] env[63371]: value = "task-1774481" [ 1683.944009] env[63371]: _type = "Task" [ 1683.944009] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.951485] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774480, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.956818] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774481, 'name': Destroy_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.088901] env[63371]: DEBUG nova.compute.manager [req-9cfaa48a-242c-4fd3-8d81-8ac61f043a9c req-bd1e35bd-829c-40c1-8755-974de5578eb8 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Received event network-changed-18a0dce6-a0d5-44e7-85a3-d54e70aa89b3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1684.089210] env[63371]: DEBUG nova.compute.manager [req-9cfaa48a-242c-4fd3-8d81-8ac61f043a9c req-bd1e35bd-829c-40c1-8755-974de5578eb8 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Refreshing instance network info cache due to event network-changed-18a0dce6-a0d5-44e7-85a3-d54e70aa89b3. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1684.089449] env[63371]: DEBUG oslo_concurrency.lockutils [req-9cfaa48a-242c-4fd3-8d81-8ac61f043a9c req-bd1e35bd-829c-40c1-8755-974de5578eb8 service nova] Acquiring lock "refresh_cache-c04edf6d-8a07-4776-be0f-b763fb3059d2" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.089594] env[63371]: DEBUG oslo_concurrency.lockutils [req-9cfaa48a-242c-4fd3-8d81-8ac61f043a9c req-bd1e35bd-829c-40c1-8755-974de5578eb8 service nova] Acquired lock "refresh_cache-c04edf6d-8a07-4776-be0f-b763fb3059d2" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.089753] env[63371]: DEBUG nova.network.neutron [req-9cfaa48a-242c-4fd3-8d81-8ac61f043a9c req-bd1e35bd-829c-40c1-8755-974de5578eb8 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Refreshing network info cache for port 18a0dce6-a0d5-44e7-85a3-d54e70aa89b3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1684.275449] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.493s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.275974] env[63371]: DEBUG nova.compute.manager [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1684.278539] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.631s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.278757] env[63371]: DEBUG nova.objects.instance [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lazy-loading 'resources' on Instance uuid ee3ea0ef-cde9-4326-b564-1aa216e00751 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1684.457612] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774481, 'name': Destroy_Task, 'duration_secs': 0.491348} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.462400] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Destroyed the VM [ 1684.462893] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1684.462993] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774480, 'name': CreateVM_Task, 'duration_secs': 0.474171} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.463215] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3be93395-f2c2-47a3-8693-026bc55befe4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.465380] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1684.466237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.466458] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.466864] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1684.467590] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25afc0fa-ec54-4599-9e0f-90e543651569 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1684.472903] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1684.472903] env[63371]: value = "task-1774482" [ 1684.472903] env[63371]: _type = "Task" [ 1684.472903] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.474218] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Waiting for the task: (returnval){ [ 1684.474218] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523d9e0e-e668-1a1a-f8b8-753d7e67c195" [ 1684.474218] env[63371]: _type = "Task" [ 1684.474218] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1684.486120] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774482, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.489448] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523d9e0e-e668-1a1a-f8b8-753d7e67c195, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.784236] env[63371]: DEBUG nova.compute.utils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1684.788337] env[63371]: DEBUG nova.compute.manager [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Not allocating networking since 'none' was specified. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1684.850504] env[63371]: DEBUG nova.network.neutron [req-9cfaa48a-242c-4fd3-8d81-8ac61f043a9c req-bd1e35bd-829c-40c1-8755-974de5578eb8 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Updated VIF entry in instance network info cache for port 18a0dce6-a0d5-44e7-85a3-d54e70aa89b3. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1684.850958] env[63371]: DEBUG nova.network.neutron [req-9cfaa48a-242c-4fd3-8d81-8ac61f043a9c req-bd1e35bd-829c-40c1-8755-974de5578eb8 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Updating instance_info_cache with network_info: [{"id": "18a0dce6-a0d5-44e7-85a3-d54e70aa89b3", "address": "fa:16:3e:ec:10:4a", "network": {"id": "84ced2d0-442c-416c-9d38-8d6606bd9f77", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1880659540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fc21e96c1ad4c00984b1d6b832af490", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18a0dce6-a0", "ovs_interfaceid": "18a0dce6-a0d5-44e7-85a3-d54e70aa89b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1684.990345] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774482, 'name': RemoveSnapshot_Task} progress is 35%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.996450] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523d9e0e-e668-1a1a-f8b8-753d7e67c195, 'name': SearchDatastore_Task, 'duration_secs': 0.021845} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.996981] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1684.997287] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1684.997548] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1684.997798] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1684.998153] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1684.998939] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11705496-07df-4000-a142-54cba39dac38 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.013457] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1685.013664] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1685.014456] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-953013a4-0e90-49f1-9e73-3174b839be1e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.022531] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Waiting for the task: (returnval){ [ 1685.022531] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52db1992-c6b5-7407-739c-9ea2c3df140e" [ 1685.022531] env[63371]: _type = "Task" [ 1685.022531] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.031633] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52db1992-c6b5-7407-739c-9ea2c3df140e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.136788] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a382f8a-44ff-49de-b69c-c987721efadc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.145380] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ae025c-de6b-4c98-913b-11d71d49da7e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.178164] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ca4f42-49c9-4f94-986a-d71ec0c3943d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.186833] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4e5586-932d-41d2-932a-2ce9c95bfe87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.201839] env[63371]: DEBUG nova.compute.provider_tree [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1685.289137] env[63371]: DEBUG nova.compute.manager [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1685.353770] env[63371]: DEBUG oslo_concurrency.lockutils [req-9cfaa48a-242c-4fd3-8d81-8ac61f043a9c req-bd1e35bd-829c-40c1-8755-974de5578eb8 service nova] Releasing lock "refresh_cache-c04edf6d-8a07-4776-be0f-b763fb3059d2" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1685.487287] env[63371]: DEBUG oslo_vmware.api [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774482, 'name': RemoveSnapshot_Task, 'duration_secs': 0.560439} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.487639] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1685.487885] env[63371]: INFO nova.compute.manager [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Took 15.56 seconds to snapshot the instance on the hypervisor. [ 1685.535898] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52db1992-c6b5-7407-739c-9ea2c3df140e, 'name': SearchDatastore_Task, 'duration_secs': 0.021119} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.536647] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de98b04b-596c-49d5-9d9a-c7ae4e0a45bc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.546397] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Waiting for the task: (returnval){ [ 1685.546397] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cfb363-fce2-6fca-12b5-3b904d15a360" [ 1685.546397] env[63371]: _type = "Task" [ 1685.546397] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1685.556860] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cfb363-fce2-6fca-12b5-3b904d15a360, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1685.706632] env[63371]: DEBUG nova.scheduler.client.report [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1686.039854] env[63371]: DEBUG nova.compute.manager [None req-384107ff-c7a9-471d-9dd3-3dabb8324a87 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Found 1 images (rotation: 2) {{(pid=63371) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1686.047962] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "9985dbcd-4498-4629-aae5-5e1933307c50" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.048257] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "9985dbcd-4498-4629-aae5-5e1933307c50" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.048442] env[63371]: INFO nova.compute.manager [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Shelving [ 1686.062795] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cfb363-fce2-6fca-12b5-3b904d15a360, 'name': SearchDatastore_Task, 'duration_secs': 0.030084} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.063868] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1686.064167] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] c04edf6d-8a07-4776-be0f-b763fb3059d2/c04edf6d-8a07-4776-be0f-b763fb3059d2.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1686.064416] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d626b6f-f877-4faa-a51e-7039d16d7c84 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.071931] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Waiting for the task: (returnval){ [ 1686.071931] env[63371]: value = "task-1774483" [ 1686.071931] env[63371]: _type = "Task" [ 1686.071931] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.081866] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774483, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.213439] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.935s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.216190] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 8.387s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.237119] env[63371]: INFO nova.scheduler.client.report [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleted allocations for instance ee3ea0ef-cde9-4326-b564-1aa216e00751 [ 1686.298763] env[63371]: DEBUG nova.compute.manager [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1686.325890] env[63371]: DEBUG nova.virt.hardware [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1686.326160] env[63371]: DEBUG nova.virt.hardware [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1686.326429] env[63371]: DEBUG nova.virt.hardware [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1686.326500] env[63371]: DEBUG nova.virt.hardware [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1686.326627] env[63371]: DEBUG nova.virt.hardware [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1686.326767] env[63371]: DEBUG nova.virt.hardware [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1686.326978] env[63371]: DEBUG nova.virt.hardware [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1686.327142] env[63371]: DEBUG nova.virt.hardware [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1686.327311] env[63371]: DEBUG nova.virt.hardware [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1686.327472] env[63371]: DEBUG nova.virt.hardware [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1686.327640] env[63371]: DEBUG nova.virt.hardware [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1686.328522] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbd1160-5e5f-4182-96b4-19c6a7f78524 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.337834] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7930160c-d881-44e2-a513-034e75103e04 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.353877] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1686.359598] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Creating folder: 
Project (574515661d5345fea6de2f6e2ac3cdfd). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1686.360276] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c04f494-616b-4d0f-8931-30cb5b608454 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.376925] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Created folder: Project (574515661d5345fea6de2f6e2ac3cdfd) in parent group-v368199. [ 1686.377095] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Creating folder: Instances. Parent ref: group-v368429. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1686.377347] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da69ba77-50e2-4305-b839-cc0fed1ea44e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.387690] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Created folder: Instances in parent group-v368429. [ 1686.387948] env[63371]: DEBUG oslo.service.loopingcall [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1686.388200] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1686.388450] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f778fd64-e353-4c02-a493-cc64abaf73e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.407318] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1686.407318] env[63371]: value = "task-1774486" [ 1686.407318] env[63371]: _type = "Task" [ 1686.407318] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.420563] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774486, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.561122] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1686.561346] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac0770df-12ee-418a-a31e-ceaabe1e839d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.569302] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1686.569302] env[63371]: value = "task-1774487" [ 1686.569302] env[63371]: _type = "Task" [ 1686.569302] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.582363] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774483, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.586257] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774487, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.745217] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d9a90610-3666-430e-8edf-ab71e69274d4 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "ee3ea0ef-cde9-4326-b564-1aa216e00751" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.263s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.919411] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774486, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.969911] env[63371]: DEBUG nova.compute.manager [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1686.971047] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-971c5f8b-8ee9-44b4-ae9c-8015928e6660 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.062090] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae602e1-3da1-448f-bd85-499ffc3e3b77 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.086197] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ca1561-0d81-4a0b-83da-f8a2dc52ebcc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.096138] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774483, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.930067} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.096632] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774487, 'name': PowerOffVM_Task, 'duration_secs': 0.259387} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.097302] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] c04edf6d-8a07-4776-be0f-b763fb3059d2/c04edf6d-8a07-4776-be0f-b763fb3059d2.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1687.097450] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1687.097654] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1687.097903] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58186099-2971-43a9-bceb-85d0dae2a3d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.100500] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d99680e-1452-44fc-80ff-b87ad6c1d54a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.128513] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e083b609-a7c8-4f03-aa94-c138584b41f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.147867] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfad714-ae94-4a78-a379-ad726c027830 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.150852] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Waiting for the task: (returnval){ [ 1687.150852] env[63371]: value = "task-1774488" [ 1687.150852] env[63371]: _type = "Task" [ 1687.150852] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.159846] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97977b2d-c0d7-4a31-a70e-03a3fa60c5d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.169604] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774488, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.190927] env[63371]: DEBUG nova.compute.provider_tree [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1687.418132] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774486, 'name': CreateVM_Task, 'duration_secs': 0.759102} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.418498] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1687.418770] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.418930] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.419266] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1687.419516] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1901931-651c-4fde-80f0-fb4d227f1731 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.424627] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1687.424627] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce60bf-b98a-70bf-edb5-99bcc1d79131" [ 1687.424627] env[63371]: _type = "Task" [ 1687.424627] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.432732] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce60bf-b98a-70bf-edb5-99bcc1d79131, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.486038] env[63371]: INFO nova.compute.manager [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] instance snapshotting [ 1687.486725] env[63371]: DEBUG nova.objects.instance [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'flavor' on Instance uuid 9862b0f0-ccf6-4e69-9e78-cf864adaa65e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1687.574581] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "e781866e-9b26-47c7-b1a6-d6d9547bf2fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.574581] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "e781866e-9b26-47c7-b1a6-d6d9547bf2fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.574663] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "e781866e-9b26-47c7-b1a6-d6d9547bf2fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.574796] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "e781866e-9b26-47c7-b1a6-d6d9547bf2fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.576032] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "e781866e-9b26-47c7-b1a6-d6d9547bf2fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.578503] env[63371]: INFO nova.compute.manager [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Terminating instance [ 1687.580565] env[63371]: DEBUG nova.compute.manager [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1687.582329] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1687.582329] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc39f656-bfdd-4848-8d86-ebca6b350272 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.590294] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1687.591045] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a0510848-9c4e-49fc-ab80-1afbb9ba95d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.599488] env[63371]: DEBUG oslo_vmware.api [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1687.599488] env[63371]: value = "task-1774489" [ 1687.599488] env[63371]: _type = "Task" [ 1687.599488] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.665020] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774488, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07818} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.665020] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1687.665020] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062c5d3d-ec2a-43ac-8ec5-ec396b13ddf6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.667121] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1687.667382] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-02e220b7-f047-4335-8a4d-b114b16369bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.687010] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] c04edf6d-8a07-4776-be0f-b763fb3059d2/c04edf6d-8a07-4776-be0f-b763fb3059d2.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1687.688131] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce91f1e7-4860-4c7b-b664-c0b9c1498ab7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.704498] env[63371]: DEBUG nova.scheduler.client.report [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1687.707681] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1687.707681] env[63371]: value = "task-1774490" [ 1687.707681] env[63371]: _type = "Task" [ 1687.707681] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.714808] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Waiting for the task: (returnval){ [ 1687.714808] env[63371]: value = "task-1774491" [ 1687.714808] env[63371]: _type = "Task" [ 1687.714808] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.718665] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774490, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.728787] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774491, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.937125] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce60bf-b98a-70bf-edb5-99bcc1d79131, 'name': SearchDatastore_Task, 'duration_secs': 0.045605} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.937492] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1687.937759] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1687.938056] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.938260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.938512] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7212a52a-365a-4700-9486-2bb5faa98942 
tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1687.938805] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66ab76ea-01ab-416e-8b81-e08e5b4ed089 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.949711] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1687.949911] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1687.950724] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5297e73-9d17-4e53-8ba3-ac1ebc0856aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.956888] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1687.956888] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5271c470-fa35-6331-9c45-01017b505c51" [ 1687.956888] env[63371]: _type = "Task" [ 1687.956888] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.966113] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5271c470-fa35-6331-9c45-01017b505c51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.994378] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4925e8cf-7f68-46bc-87e9-951e49b28b14 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.015697] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbf84adf-2d2f-4fd0-ae84-861172397d19 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.109891] env[63371]: DEBUG oslo_vmware.api [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774489, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.117808] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "935cf583-ecde-4a10-a773-6ff765e5bb49" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.119203] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "935cf583-ecde-4a10-a773-6ff765e5bb49" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.226145] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774490, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.232549] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774491, 'name': ReconfigVM_Task, 'duration_secs': 0.303175} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.232841] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Reconfigured VM instance instance-00000053 to attach disk [datastore1] c04edf6d-8a07-4776-be0f-b763fb3059d2/c04edf6d-8a07-4776-be0f-b763fb3059d2.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1688.233577] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7734512d-4243-4621-8cbe-f4ed15009221 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.241617] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Waiting for the task: (returnval){ [ 1688.241617] env[63371]: value = "task-1774492" [ 1688.241617] env[63371]: _type = "Task" [ 1688.241617] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.251339] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774492, 'name': Rename_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.467567] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5271c470-fa35-6331-9c45-01017b505c51, 'name': SearchDatastore_Task, 'duration_secs': 0.030332} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.468330] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b421b99c-2735-4721-91b5-b953d9a3a9cf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.474511] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1688.474511] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522484fe-3fb5-ee45-bce8-45fc7772e54f" [ 1688.474511] env[63371]: _type = "Task" [ 1688.474511] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.482399] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522484fe-3fb5-ee45-bce8-45fc7772e54f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.527225] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1688.527571] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-fde555e0-0b85-47cb-aa67-8b80649c9ed0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.537175] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1688.537175] env[63371]: value = "task-1774493" [ 1688.537175] env[63371]: _type = "Task" [ 1688.537175] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.546325] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774493, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.610699] env[63371]: DEBUG oslo_vmware.api [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774489, 'name': PowerOffVM_Task, 'duration_secs': 0.818649} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.610967] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1688.611150] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1688.611412] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51d233f1-8b30-4244-ac60-232dabbd0ac5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.621661] env[63371]: DEBUG nova.compute.manager [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1688.718703] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.502s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1688.721588] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.125s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.722139] env[63371]: DEBUG nova.objects.instance [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lazy-loading 'resources' on Instance uuid fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1688.733659] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774490, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.735235] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1688.735922] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1688.735922] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleting the datastore file [datastore1] e781866e-9b26-47c7-b1a6-d6d9547bf2fd {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1688.736563] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a192e3fd-46e6-4ba6-ab3d-bb11e580607e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.751303] env[63371]: DEBUG oslo_vmware.api [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1688.751303] env[63371]: value = "task-1774495" [ 1688.751303] env[63371]: _type = "Task" [ 1688.751303] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.758795] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774492, 'name': Rename_Task, 'duration_secs': 0.166358} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.760067] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1688.760396] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e19757e-2a32-4bd5-90d4-2e3ceaa27fc8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.766357] env[63371]: DEBUG oslo_vmware.api [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774495, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.775304] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Waiting for the task: (returnval){ [ 1688.775304] env[63371]: value = "task-1774496" [ 1688.775304] env[63371]: _type = "Task" [ 1688.775304] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.786787] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774496, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.986351] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522484fe-3fb5-ee45-bce8-45fc7772e54f, 'name': SearchDatastore_Task, 'duration_secs': 0.014778} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.986644] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1688.986919] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3da99cec-409f-4ea0-891c-2e9d7429674d/3da99cec-409f-4ea0-891c-2e9d7429674d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1688.987204] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0b087591-9d41-4578-9e89-f198c5cf748d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.994847] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1688.994847] env[63371]: value = "task-1774497" [ 1688.994847] env[63371]: _type = "Task" [ 1688.994847] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.003536] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774497, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.048481] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774493, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.143811] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.235033] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774490, 'name': CreateSnapshot_Task, 'duration_secs': 1.043016} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.237430] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1689.238555] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85fd4ac-5cdd-4d38-a122-a07cff46d77d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.266101] env[63371]: DEBUG oslo_vmware.api [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774495, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176982} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.266712] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1689.267028] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1689.267198] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1689.267426] env[63371]: INFO nova.compute.manager [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Took 1.69 seconds to destroy the instance on the hypervisor. [ 1689.267756] env[63371]: DEBUG oslo.service.loopingcall [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1689.268012] env[63371]: DEBUG nova.compute.manager [-] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1689.268189] env[63371]: DEBUG nova.network.neutron [-] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1689.291936] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774496, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.293332] env[63371]: INFO nova.scheduler.client.report [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted allocation for migration 5c98d699-8a6c-49bb-bd05-0a06abecc138 [ 1689.509071] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774497, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.554890] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774493, 'name': CreateSnapshot_Task, 'duration_secs': 0.584081} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.557990] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1689.559141] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04fdca9d-6988-4fb2-93b0-629ea1f4a159 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.631730] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63edbbeb-562e-4d4d-88de-74ae81ef05d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.638092] env[63371]: DEBUG nova.compute.manager [req-3d09d7cf-ff3e-404c-813b-8af1df077174 req-7ea0dbe4-4c47-48d7-a698-8409d4a0b29e service nova] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Received event network-vif-deleted-24bd2275-5bff-4a52-a3f0-63ef1b63b73b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1689.638302] env[63371]: INFO nova.compute.manager [req-3d09d7cf-ff3e-404c-813b-8af1df077174 req-7ea0dbe4-4c47-48d7-a698-8409d4a0b29e service nova] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Neutron deleted interface 24bd2275-5bff-4a52-a3f0-63ef1b63b73b; detaching it from the instance and deleting it from the info cache [ 1689.638473] env[63371]: DEBUG nova.network.neutron [req-3d09d7cf-ff3e-404c-813b-8af1df077174 req-7ea0dbe4-4c47-48d7-a698-8409d4a0b29e service nova] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.642621] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3227906d-7dae-43c4-a3cc-0a5eace8c4c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.674988] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af5716a-fef7-46ff-ad8e-3f31ce7bcc01 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.686016] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0ae69e-6e35-4410-a3c9-cb6a5ae36911 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.701603] env[63371]: DEBUG nova.compute.provider_tree [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Inventory has not changed in ProviderTree for provider: 
c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1689.762389] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1689.762812] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-9c5d73ac-5a37-41c8-a0dc-e3c374c654d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.772665] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1689.772665] env[63371]: value = "task-1774498" [ 1689.772665] env[63371]: _type = "Task" [ 1689.772665] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.786879] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774498, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.789969] env[63371]: DEBUG oslo_vmware.api [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774496, 'name': PowerOnVM_Task, 'duration_secs': 0.570355} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.790250] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1689.790481] env[63371]: INFO nova.compute.manager [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Took 8.92 seconds to spawn the instance on the hypervisor. 
[ 1689.790665] env[63371]: DEBUG nova.compute.manager [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1689.791500] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da53aa98-ef29-4f8f-980d-dc4efe3fe4b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.803028] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8d695311-aabb-4bad-9d01-10134b17167e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 15.257s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.007008] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774497, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597087} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.007277] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3da99cec-409f-4ea0-891c-2e9d7429674d/3da99cec-409f-4ea0-891c-2e9d7429674d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1690.007507] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1690.007767] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-20c92e70-89d4-44d5-948b-e8e14329bdd0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.016081] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1690.016081] env[63371]: value = "task-1774499" [ 1690.016081] env[63371]: _type = "Task" [ 1690.016081] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.026997] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774499, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.081748] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1690.082576] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5d2b5ee1-3111-4788-8f29-86debdff3532 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.095074] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1690.095074] env[63371]: value = "task-1774500" [ 1690.095074] env[63371]: _type = "Task" [ 1690.095074] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.102991] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774500, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.111220] env[63371]: DEBUG nova.network.neutron [-] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.124018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87558f79-b914-4bbb-9cd7-fe262d30e6e3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.124018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87558f79-b914-4bbb-9cd7-fe262d30e6e3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.124018] env[63371]: DEBUG nova.compute.manager [None req-87558f79-b914-4bbb-9cd7-fe262d30e6e3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1690.124018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc55873-75d9-4756-9aa6-1dd2fadd5a4e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.132279] env[63371]: DEBUG nova.compute.manager [None req-87558f79-b914-4bbb-9cd7-fe262d30e6e3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 
88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63371) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1690.132586] env[63371]: DEBUG nova.objects.instance [None req-87558f79-b914-4bbb-9cd7-fe262d30e6e3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lazy-loading 'flavor' on Instance uuid 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1690.144644] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-32a97759-9cdf-4f6d-af88-f8a1cd43c3a2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.156018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b51737-61a1-4163-9e5b-575e954ed466 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.193637] env[63371]: DEBUG nova.compute.manager [req-3d09d7cf-ff3e-404c-813b-8af1df077174 req-7ea0dbe4-4c47-48d7-a698-8409d4a0b29e service nova] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Detach interface failed, port_id=24bd2275-5bff-4a52-a3f0-63ef1b63b73b, reason: Instance e781866e-9b26-47c7-b1a6-d6d9547bf2fd could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1690.207385] env[63371]: DEBUG nova.scheduler.client.report [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1690.284388] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774498, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.312268] env[63371]: INFO nova.compute.manager [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Took 25.96 seconds to build instance. [ 1690.527220] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774499, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074338} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.527534] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1690.528337] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892a1748-339a-4a06-bc78-af2db7e94e39 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.558981] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 3da99cec-409f-4ea0-891c-2e9d7429674d/3da99cec-409f-4ea0-891c-2e9d7429674d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1690.559409] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68a9bac5-a7ad-4f04-bdc2-d1abf2652245 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.589661] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1690.589661] env[63371]: value = "task-1774501" [ 1690.589661] env[63371]: _type = "Task" [ 1690.589661] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.602210] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774501, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.610339] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774500, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.612285] env[63371]: INFO nova.compute.manager [-] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Took 1.34 seconds to deallocate network for instance. 
[ 1690.638472] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-87558f79-b914-4bbb-9cd7-fe262d30e6e3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1690.638597] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a6497c7-ddc5-4c1e-9e06-98c0a38a2a46 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.648022] env[63371]: DEBUG oslo_vmware.api [None req-87558f79-b914-4bbb-9cd7-fe262d30e6e3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1690.648022] env[63371]: value = "task-1774502" [ 1690.648022] env[63371]: _type = "Task" [ 1690.648022] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.661375] env[63371]: DEBUG oslo_vmware.api [None req-87558f79-b914-4bbb-9cd7-fe262d30e6e3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774502, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.713266] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.992s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1690.716139] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 11.703s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.716357] env[63371]: DEBUG nova.objects.instance [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63371) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1690.744107] env[63371]: INFO nova.scheduler.client.report [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Deleted allocations for instance fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e [ 1690.793468] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774498, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.815363] env[63371]: DEBUG oslo_concurrency.lockutils [None req-66d15d7e-35e7-4d4d-97c4-81c09fae4c73 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Lock "c04edf6d-8a07-4776-be0f-b763fb3059d2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.474s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.063616] env[63371]: DEBUG nova.compute.manager [req-bdfccf8b-58aa-4fdf-8710-1c0a133cbf3c req-76f017bb-ab7a-4746-a427-cf6fb25c8e84 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Received event network-changed-18a0dce6-a0d5-44e7-85a3-d54e70aa89b3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1691.064074] env[63371]: DEBUG nova.compute.manager [req-bdfccf8b-58aa-4fdf-8710-1c0a133cbf3c req-76f017bb-ab7a-4746-a427-cf6fb25c8e84 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Refreshing instance network info cache due to event network-changed-18a0dce6-a0d5-44e7-85a3-d54e70aa89b3. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1691.064482] env[63371]: DEBUG oslo_concurrency.lockutils [req-bdfccf8b-58aa-4fdf-8710-1c0a133cbf3c req-76f017bb-ab7a-4746-a427-cf6fb25c8e84 service nova] Acquiring lock "refresh_cache-c04edf6d-8a07-4776-be0f-b763fb3059d2" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.064829] env[63371]: DEBUG oslo_concurrency.lockutils [req-bdfccf8b-58aa-4fdf-8710-1c0a133cbf3c req-76f017bb-ab7a-4746-a427-cf6fb25c8e84 service nova] Acquired lock "refresh_cache-c04edf6d-8a07-4776-be0f-b763fb3059d2" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.065167] env[63371]: DEBUG nova.network.neutron [req-bdfccf8b-58aa-4fdf-8710-1c0a133cbf3c req-76f017bb-ab7a-4746-a427-cf6fb25c8e84 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Refreshing network info cache for port 18a0dce6-a0d5-44e7-85a3-d54e70aa89b3 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1691.108184] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774501, 'name': ReconfigVM_Task, 'duration_secs': 0.478098} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.119159] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 3da99cec-409f-4ea0-891c-2e9d7429674d/3da99cec-409f-4ea0-891c-2e9d7429674d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1691.119159] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774500, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.119159] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02addcd2-f55c-4fb9-b739-ee72f06627f5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.123666] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.127926] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1691.127926] env[63371]: value = "task-1774503" [ 1691.127926] env[63371]: _type = "Task" [ 1691.127926] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.137663] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774503, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.160896] env[63371]: DEBUG oslo_vmware.api [None req-87558f79-b914-4bbb-9cd7-fe262d30e6e3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774502, 'name': PowerOffVM_Task, 'duration_secs': 0.448485} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.161564] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-87558f79-b914-4bbb-9cd7-fe262d30e6e3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1691.161915] env[63371]: DEBUG nova.compute.manager [None req-87558f79-b914-4bbb-9cd7-fe262d30e6e3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1691.164015] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0658b756-fb9d-410c-a4a3-fc3b789294b8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.260459] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47ceffdb-80e5-4c82-8ec6-e9c93c276a25 tempest-SecurityGroupsTestJSON-986591546 tempest-SecurityGroupsTestJSON-986591546-project-member] Lock "fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.825s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.288070] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774498, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.624477] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774500, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.639681] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774503, 'name': Rename_Task, 'duration_secs': 0.197196} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.639991] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1691.640316] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09709a1a-05d3-44ac-969c-42483359e822 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.648425] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1691.648425] env[63371]: value = "task-1774504" [ 1691.648425] env[63371]: _type = "Task" [ 1691.648425] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.664618] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774504, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.676701] env[63371]: DEBUG oslo_concurrency.lockutils [None req-87558f79-b914-4bbb-9cd7-fe262d30e6e3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.555s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.731710] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a31f2e9-24a1-4bf6-ac16-bc3889d02c58 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1691.733455] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.601s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.735615] env[63371]: INFO nova.compute.claims [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1691.794924] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774498, 'name': CloneVM_Task, 'duration_secs': 1.860581} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.795832] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Created linked-clone VM from snapshot [ 1691.796649] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cad777c2-d78e-4cb0-ab7e-a96a563e5eae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.809717] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Uploading image 496b93e2-5142-43b5-a0fc-8e75cb31f472 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1691.848088] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1691.848088] env[63371]: value = "vm-368434" [ 1691.848088] env[63371]: _type = "VirtualMachine" [ 1691.848088] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1691.848392] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6450c975-748c-480b-b93b-49f0fb0211ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.856902] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lease: (returnval){ [ 1691.856902] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52522d27-69a4-d268-951a-784be3f1ec2b" [ 1691.856902] env[63371]: _type = "HttpNfcLease" [ 1691.856902] env[63371]: } obtained for exporting VM: (result){ [ 1691.856902] env[63371]: value = "vm-368434" [ 1691.856902] env[63371]: _type = "VirtualMachine" [ 1691.856902] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1691.857245] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the lease: (returnval){ [ 1691.857245] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52522d27-69a4-d268-951a-784be3f1ec2b" [ 1691.857245] env[63371]: _type = "HttpNfcLease" [ 1691.857245] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1691.865175] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1691.865175] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52522d27-69a4-d268-951a-784be3f1ec2b" [ 1691.865175] env[63371]: _type = "HttpNfcLease" [ 1691.865175] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1692.110819] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "158259a4-f54a-4192-b235-f03838193516" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.111115] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "158259a4-f54a-4192-b235-f03838193516" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.112304] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774500, 'name': CloneVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.159953] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774504, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.288122] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.291059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.291059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.291059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1692.291059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1692.292412] env[63371]: INFO nova.compute.manager [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Terminating instance [ 1692.294783] env[63371]: DEBUG nova.compute.manager [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1692.295851] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1692.301019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176b5cb4-cce2-4364-b370-25f65a6daafc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.304755] env[63371]: DEBUG nova.network.neutron [req-bdfccf8b-58aa-4fdf-8710-1c0a133cbf3c req-76f017bb-ab7a-4746-a427-cf6fb25c8e84 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Updated VIF entry in instance network info cache for port 18a0dce6-a0d5-44e7-85a3-d54e70aa89b3. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1692.305247] env[63371]: DEBUG nova.network.neutron [req-bdfccf8b-58aa-4fdf-8710-1c0a133cbf3c req-76f017bb-ab7a-4746-a427-cf6fb25c8e84 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Updating instance_info_cache with network_info: [{"id": "18a0dce6-a0d5-44e7-85a3-d54e70aa89b3", "address": "fa:16:3e:ec:10:4a", "network": {"id": "84ced2d0-442c-416c-9d38-8d6606bd9f77", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-1880659540-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.166", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4fc21e96c1ad4c00984b1d6b832af490", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18a0dce6-a0", "ovs_interfaceid": "18a0dce6-a0d5-44e7-85a3-d54e70aa89b3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1692.313637] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1692.314507] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09bfc654-3ea5-462c-a2f3-87fee3501df3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.324022] env[63371]: DEBUG oslo_vmware.api [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1692.324022] env[63371]: value = "task-1774506" [ 1692.324022] env[63371]: _type = "Task" [ 1692.324022] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.334695] env[63371]: DEBUG oslo_vmware.api [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774506, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.369539] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1692.369539] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52522d27-69a4-d268-951a-784be3f1ec2b" [ 1692.369539] env[63371]: _type = "HttpNfcLease" [ 1692.369539] env[63371]: } is ready. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1692.370157] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1692.370157] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52522d27-69a4-d268-951a-784be3f1ec2b" [ 1692.370157] env[63371]: _type = "HttpNfcLease" [ 1692.370157] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1692.372963] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff62dcce-5035-4e93-9647-3d0d42d9b9fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.384347] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526c1aea-1365-986a-b94b-2338879d59b1/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1692.384666] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526c1aea-1365-986a-b94b-2338879d59b1/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1692.495211] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1692.495473] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1692.518426] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-04eb2593-e609-4b72-ae76-8d38bd65f6e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.616212] env[63371]: DEBUG nova.compute.manager [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1692.625340] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774500, 'name': CloneVM_Task, 'duration_secs': 2.029314} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.625920] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Created linked-clone VM from snapshot [ 1692.626733] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2056d85c-60a9-4adf-aa87-7c990d65454e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.636594] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Uploading image e0968020-dc04-4588-9b55-b889aca0134c {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1692.665428] env[63371]: DEBUG oslo_vmware.api [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774504, 'name': PowerOnVM_Task, 'duration_secs': 0.619162} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.667713] env[63371]: DEBUG oslo_vmware.rw_handles [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1692.667713] env[63371]: value = "vm-368435" [ 1692.667713] env[63371]: _type = "VirtualMachine" [ 1692.667713] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1692.668355] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1692.668801] env[63371]: INFO nova.compute.manager [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Took 6.37 seconds to spawn the instance on the hypervisor. 
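
Aside: the Task objects being created and polled above (PowerOffVM_Task, CloneVM_Task, PowerOnVM_Task) follow oslo.vmware's invoke-then-poll pattern. A minimal sketch of that pattern, assuming a reachable vCenter; the endpoint, credentials and poll interval below are hypothetical placeholders, not values taken from this log, and this is a sketch rather than Nova's actual wrapper code:

from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

# Positional args: host, server_username, server_password, api_retry_count,
# task_poll_interval (all placeholder values here).
session = vmware_api.VMwareAPISession(
    'vc.example.test', 'administrator@vsphere.local', 'secret', 10, 0.5)

# Build a moref for the VM, start the asynchronous vSphere task, then block
# until it finishes -- the repeated "_poll_task ... progress is 0%" lines in
# this log are exactly that polling loop.
vm_ref = vim_util.get_moref('vm-368435', 'VirtualMachine')
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)
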
[ 1692.669133] env[63371]: DEBUG nova.compute.manager [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1692.669794] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f7fafc15-67eb-4f4e-b6ee-d83f6e12ad2d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.672494] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bff803d-bc33-4c03-bf29-ae83c094b179 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.686109] env[63371]: DEBUG oslo_vmware.rw_handles [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lease: (returnval){ [ 1692.686109] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52738347-0f5e-c19e-abb6-514356cb8568" [ 1692.686109] env[63371]: _type = "HttpNfcLease" [ 1692.686109] env[63371]: } obtained for exporting VM: (result){ [ 1692.686109] env[63371]: value = "vm-368435" [ 1692.686109] env[63371]: _type = "VirtualMachine" [ 1692.686109] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1692.687398] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the lease: (returnval){ [ 1692.687398] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52738347-0f5e-c19e-abb6-514356cb8568" [ 1692.687398] env[63371]: _type = "HttpNfcLease" [ 1692.687398] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1692.696173] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1692.696173] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52738347-0f5e-c19e-abb6-514356cb8568" [ 1692.696173] env[63371]: _type = "HttpNfcLease" [ 1692.696173] env[63371]: } is initializing. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1692.809026] env[63371]: DEBUG oslo_concurrency.lockutils [req-bdfccf8b-58aa-4fdf-8710-1c0a133cbf3c req-76f017bb-ab7a-4746-a427-cf6fb25c8e84 service nova] Releasing lock "refresh_cache-c04edf6d-8a07-4776-be0f-b763fb3059d2" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.846772] env[63371]: DEBUG oslo_vmware.api [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774506, 'name': PowerOffVM_Task, 'duration_secs': 0.348621} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.847300] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1692.847524] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1692.847916] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8a5a2d01-a92c-4533-897a-3371042829d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.880415] env[63371]: DEBUG nova.objects.instance [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lazy-loading 'flavor' on Instance uuid 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1692.888260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1692.888382] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1693.015036] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1693.015451] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1693.160607] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.200155] env[63371]: INFO nova.compute.manager [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 
3da99cec-409f-4ea0-891c-2e9d7429674d] Took 15.70 seconds to build instance. [ 1693.205737] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1693.205737] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52738347-0f5e-c19e-abb6-514356cb8568" [ 1693.205737] env[63371]: _type = "HttpNfcLease" [ 1693.205737] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1693.206100] env[63371]: DEBUG oslo_vmware.rw_handles [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1693.206100] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52738347-0f5e-c19e-abb6-514356cb8568" [ 1693.206100] env[63371]: _type = "HttpNfcLease" [ 1693.206100] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1693.207023] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-988d3543-d5bd-4585-8752-e2b59b4ffdd3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.223656] env[63371]: DEBUG oslo_vmware.rw_handles [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a526cc-6460-624d-0fbc-2cf25eeebf51/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1693.223780] env[63371]: DEBUG oslo_vmware.rw_handles [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a526cc-6460-624d-0fbc-2cf25eeebf51/disk-0.vmdk for reading. 
{{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1693.291981] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfcb0624-1404-4c5b-bda0-d4b169a3a9a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.301874] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6a55fe4-dc67-4375-ab77-be3a9acd3ba0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.339231] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e2d73a3-ae4b-4df7-abcc-bfb3a5db015c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.352144] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c15784-728b-454b-88c0-eec2c4f36f91 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.359674] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c1628187-1511-4d9a-ac40-3e5b82502c94 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.375531] env[63371]: DEBUG nova.compute.provider_tree [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1693.391275] env[63371]: DEBUG oslo_concurrency.lockutils [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1693.391470] env[63371]: DEBUG oslo_concurrency.lockutils [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1693.391643] env[63371]: DEBUG nova.network.neutron [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1693.391817] env[63371]: DEBUG nova.objects.instance [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lazy-loading 'info_cache' on Instance uuid 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1693.398083] env[63371]: DEBUG nova.compute.manager [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 
3a6c12a7-732f-4a73-a8c5-6810b554cc03] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1693.629347] env[63371]: INFO nova.compute.manager [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Rebuilding instance [ 1693.687836] env[63371]: DEBUG nova.compute.manager [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1693.688791] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd64f2e3-ba58-4765-ad41-1a09f16e0e07 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.702169] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7212a52a-365a-4700-9486-2bb5faa98942 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Lock "3da99cec-409f-4ea0-891c-2e9d7429674d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 17.217s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1693.879464] env[63371]: DEBUG nova.scheduler.client.report [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1693.895841] env[63371]: DEBUG nova.objects.base [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Object Instance<88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec> lazy-loaded attributes: flavor,info_cache {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1693.944220] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1693.948411] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1693.949569] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 
tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1693.949569] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleting the datastore file [datastore1] e1bc4623-f6b5-4440-a58d-594e9cbe3628 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1693.950736] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c602579d-c2e3-4455-917c-2c4752481b41 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.967553] env[63371]: DEBUG oslo_vmware.api [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1693.967553] env[63371]: value = "task-1774509" [ 1693.967553] env[63371]: _type = "Task" [ 1693.967553] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.987176] env[63371]: DEBUG oslo_vmware.api [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774509, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.203124] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1694.203124] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-055f1866-ab62-4522-9611-17a2887dfeca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.213585] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1694.213585] env[63371]: value = "task-1774510" [ 1694.213585] env[63371]: _type = "Task" [ 1694.213585] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.224426] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774510, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.389985] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.656s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.390469] env[63371]: DEBUG nova.compute.manager [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1694.396460] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.917s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1694.398494] env[63371]: INFO nova.compute.claims [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1694.482396] env[63371]: DEBUG oslo_vmware.api [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774509, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.326056} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.482694] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1694.482870] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1694.483050] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1694.483439] env[63371]: INFO nova.compute.manager [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Took 2.19 seconds to destroy the instance on the hypervisor. 
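
The "Acquiring lock ... by ...", 'acquired ... :: waited' and '"released" ... :: held' lines that bracket operations such as the compute_resources claim above are emitted by oslo.concurrency's synchronized decorator. A minimal, hypothetical sketch of that mechanism (the lock name and function below are placeholders chosen for illustration, not Nova code):

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim(instance_uuid):
    # Runs with the named lock held; concurrent callers serialize here, and
    # the decorator logs the waited/held durations seen throughout this log.
    return instance_uuid

claim('00000000-0000-0000-0000-000000000000')
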
[ 1694.483522] env[63371]: DEBUG oslo.service.loopingcall [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1694.483742] env[63371]: DEBUG nova.compute.manager [-] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1694.483742] env[63371]: DEBUG nova.network.neutron [-] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1694.711466] env[63371]: DEBUG nova.network.neutron [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [{"id": "01b878e5-651e-49f1-959f-7da17291c0bc", "address": "fa:16:3e:b7:c4:0c", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b878e5-65", "ovs_interfaceid": "01b878e5-651e-49f1-959f-7da17291c0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1694.727327] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774510, 'name': PowerOffVM_Task, 'duration_secs': 0.156865} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.728422] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1694.729704] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1694.731120] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b41d16-bebd-4739-a509-c8e4e2d1fe2f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.747349] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1694.747349] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-661f372a-2582-4220-ad18-932f3df53c3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.787176] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1694.788112] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1694.788647] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Deleting the datastore file [datastore1] 3da99cec-409f-4ea0-891c-2e9d7429674d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1694.789737] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4fe18170-f5b6-445a-856a-94203b8b78bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.797531] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1694.797531] env[63371]: value = "task-1774512" [ 1694.797531] env[63371]: _type = "Task" [ 1694.797531] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.810215] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774512, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.815226] env[63371]: DEBUG nova.compute.manager [req-7666a3ad-f94e-4850-9990-9b82fb331d54 req-e13a2acc-c4e3-40f3-930b-497a47f4d622 service nova] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Received event network-vif-deleted-826bbbf2-7d7e-47d0-9516-4cb91c3d94a7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1694.815333] env[63371]: INFO nova.compute.manager [req-7666a3ad-f94e-4850-9990-9b82fb331d54 req-e13a2acc-c4e3-40f3-930b-497a47f4d622 service nova] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Neutron deleted interface 826bbbf2-7d7e-47d0-9516-4cb91c3d94a7; detaching it from the instance and deleting it from the info cache [ 1694.815612] env[63371]: DEBUG nova.network.neutron [req-7666a3ad-f94e-4850-9990-9b82fb331d54 req-e13a2acc-c4e3-40f3-930b-497a47f4d622 service nova] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1694.906129] env[63371]: DEBUG nova.compute.utils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1694.912327] env[63371]: DEBUG nova.compute.manager [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1694.912327] env[63371]: DEBUG nova.network.neutron [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1694.987193] env[63371]: DEBUG nova.policy [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25f28e53648c41d1a147c1aa04f0a708', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fb0da840f6847f19f03a1db8a1c3f4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1695.215133] env[63371]: DEBUG oslo_concurrency.lockutils [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1695.272754] env[63371]: DEBUG nova.network.neutron [-] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.313232] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774512, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127369} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.314568] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1695.314568] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1695.314677] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1695.321772] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5d3bd2e-4186-47e7-be1a-81bdd2a56041 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.332774] env[63371]: DEBUG nova.network.neutron [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Successfully created port: 421d7cf6-e899-4181-9f5b-07a12ec96caf {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1695.342118] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82126db8-f88e-4d12-8a28-419f2aaad4b8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.390434] env[63371]: DEBUG nova.compute.manager [req-7666a3ad-f94e-4850-9990-9b82fb331d54 req-e13a2acc-c4e3-40f3-930b-497a47f4d622 service nova] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Detach interface failed, port_id=826bbbf2-7d7e-47d0-9516-4cb91c3d94a7, reason: Instance e1bc4623-f6b5-4440-a58d-594e9cbe3628 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1695.412134] env[63371]: DEBUG nova.compute.manager [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1695.720228] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1695.722023] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2c551fb2-8c44-4c83-b511-3d9772dae61d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.730959] env[63371]: DEBUG oslo_vmware.api [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1695.730959] env[63371]: value = "task-1774513" [ 1695.730959] env[63371]: _type = "Task" [ 1695.730959] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.746736] env[63371]: DEBUG oslo_vmware.api [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774513, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.775992] env[63371]: INFO nova.compute.manager [-] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Took 1.29 seconds to deallocate network for instance. [ 1695.841955] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0bd3ef-2546-42eb-8174-e86a9b14912f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.851949] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb7f965-a74d-43a3-b757-e07800a608b7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.887307] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b96a5f-3586-41f7-b480-ac57d0204848 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.897659] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780c41f0-a016-4573-8bb1-88783a8921d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.916113] env[63371]: DEBUG nova.compute.provider_tree [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1696.113795] env[63371]: DEBUG 
oslo_concurrency.lockutils [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "195de525-1081-4db6-acf3-04a6d3eb142f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.114177] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "195de525-1081-4db6-acf3-04a6d3eb142f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.114449] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "195de525-1081-4db6-acf3-04a6d3eb142f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.114667] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "195de525-1081-4db6-acf3-04a6d3eb142f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.114850] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "195de525-1081-4db6-acf3-04a6d3eb142f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.117653] env[63371]: INFO nova.compute.manager [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Terminating instance [ 1696.119666] env[63371]: DEBUG nova.compute.manager [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1696.119858] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1696.121330] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec8119f-00d9-4672-a6d3-8db1b53c8906 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.130785] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1696.131104] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0edace5b-7162-4899-bf3b-3d8e005ba0bc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.139718] env[63371]: DEBUG oslo_vmware.api [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1696.139718] env[63371]: value = "task-1774514" [ 1696.139718] env[63371]: _type = "Task" [ 1696.139718] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.150119] env[63371]: DEBUG oslo_vmware.api [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774514, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.243555] env[63371]: DEBUG oslo_vmware.api [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774513, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.287055] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.424791] env[63371]: DEBUG nova.compute.manager [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1696.446998] env[63371]: ERROR nova.scheduler.client.report [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [req-c9aabaa6-d68b-45c2-bc6c-0b8d5183e786] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c9aabaa6-d68b-45c2-bc6c-0b8d5183e786"}]} [ 1696.469254] env[63371]: DEBUG nova.scheduler.client.report [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1696.483812] env[63371]: DEBUG nova.scheduler.client.report [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1696.484073] env[63371]: DEBUG nova.compute.provider_tree [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1696.498524] env[63371]: DEBUG nova.scheduler.client.report [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1696.520875] env[63371]: DEBUG nova.scheduler.client.report [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Refreshing trait associations for resource provider 
c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1696.652539] env[63371]: DEBUG oslo_vmware.api [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774514, 'name': PowerOffVM_Task, 'duration_secs': 0.313496} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.655603] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1696.655868] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1696.657496] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6bfbe781-6671-4b28-9e40-066dac865e58 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.747119] env[63371]: DEBUG oslo_vmware.api [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774513, 'name': PowerOnVM_Task, 'duration_secs': 0.518587} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.750098] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1696.750335] env[63371]: DEBUG nova.compute.manager [None req-88450410-93a8-4f63-a7d0-989f29022cb6 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1696.751366] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38eb01ed-4870-499f-aab4-c9dabb86ff8a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.872224] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0d256c-b61a-4fdf-9f9e-0b74cdd0458c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.876922] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1696.877165] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1696.877350] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Deleting the datastore file [datastore1] 195de525-1081-4db6-acf3-04a6d3eb142f {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1696.877601] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c3081196-e525-45ea-b53e-2592149209cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.883225] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a7bbe3-af08-4091-81f9-1cd5fd349d30 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.888189] env[63371]: DEBUG oslo_vmware.api [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for the task: (returnval){ [ 1696.888189] env[63371]: value = "task-1774516" [ 1696.888189] env[63371]: _type = "Task" [ 1696.888189] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.927601] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c1531f-7256-47cc-bce5-d6dde6127f3b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.934753] env[63371]: DEBUG oslo_vmware.api [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774516, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.942172] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83258a31-b44c-45c8-aedc-6188d0c828c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.958863] env[63371]: DEBUG nova.compute.provider_tree [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1696.964022] env[63371]: DEBUG nova.compute.manager [req-7a3bfbed-4792-4690-83e4-761384229a16 req-c4d902f8-fcbb-462b-8916-57f2b15a2c09 service nova] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Received event network-vif-plugged-421d7cf6-e899-4181-9f5b-07a12ec96caf {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1696.964022] env[63371]: DEBUG oslo_concurrency.lockutils [req-7a3bfbed-4792-4690-83e4-761384229a16 req-c4d902f8-fcbb-462b-8916-57f2b15a2c09 service nova] Acquiring lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1696.964022] env[63371]: DEBUG oslo_concurrency.lockutils [req-7a3bfbed-4792-4690-83e4-761384229a16 req-c4d902f8-fcbb-462b-8916-57f2b15a2c09 service nova] Lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1696.964022] env[63371]: DEBUG oslo_concurrency.lockutils [req-7a3bfbed-4792-4690-83e4-761384229a16 req-c4d902f8-fcbb-462b-8916-57f2b15a2c09 service nova] Lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1696.964022] env[63371]: DEBUG nova.compute.manager [req-7a3bfbed-4792-4690-83e4-761384229a16 req-c4d902f8-fcbb-462b-8916-57f2b15a2c09 service nova] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] No 
waiting events found dispatching network-vif-plugged-421d7cf6-e899-4181-9f5b-07a12ec96caf {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1696.964022] env[63371]: WARNING nova.compute.manager [req-7a3bfbed-4792-4690-83e4-761384229a16 req-c4d902f8-fcbb-462b-8916-57f2b15a2c09 service nova] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Received unexpected event network-vif-plugged-421d7cf6-e899-4181-9f5b-07a12ec96caf for instance with vm_state building and task_state spawning. [ 1697.095523] env[63371]: DEBUG nova.network.neutron [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Successfully updated port: 421d7cf6-e899-4181-9f5b-07a12ec96caf {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1697.399669] env[63371]: DEBUG oslo_vmware.api [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Task: {'id': task-1774516, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.255549} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.399934] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1697.400147] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1697.400319] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1697.400487] env[63371]: INFO nova.compute.manager [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1697.400728] env[63371]: DEBUG oslo.service.loopingcall [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1697.400922] env[63371]: DEBUG nova.compute.manager [-] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1697.401029] env[63371]: DEBUG nova.network.neutron [-] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1697.502173] env[63371]: DEBUG nova.scheduler.client.report [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 122 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1697.502468] env[63371]: DEBUG nova.compute.provider_tree [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 122 to 123 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1697.502648] env[63371]: DEBUG nova.compute.provider_tree [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1697.598451] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "refresh_cache-382a5997-90bb-4bbc-b595-23c8d2f2e1f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1697.598711] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "refresh_cache-382a5997-90bb-4bbc-b595-23c8d2f2e1f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1697.599045] env[63371]: DEBUG nova.network.neutron [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Building network info cache for instance {{(pid=63371) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1698.010311] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.612s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.010311] env[63371]: DEBUG nova.compute.manager [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1698.014577] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.513s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.018226] env[63371]: INFO nova.compute.claims [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1698.148543] env[63371]: DEBUG nova.network.neutron [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1698.364138] env[63371]: DEBUG nova.network.neutron [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Updating instance_info_cache with network_info: [{"id": "421d7cf6-e899-4181-9f5b-07a12ec96caf", "address": "fa:16:3e:59:a1:5e", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap421d7cf6-e8", "ovs_interfaceid": "421d7cf6-e899-4181-9f5b-07a12ec96caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1698.523544] env[63371]: DEBUG nova.compute.utils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1698.525066] env[63371]: DEBUG nova.compute.manager [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1698.525270] env[63371]: DEBUG nova.network.neutron [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1698.533027] env[63371]: DEBUG nova.network.neutron [-] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1698.585503] env[63371]: DEBUG nova.policy [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c6416719728485f8dd45eea9e39fdc5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58f967d3770541269fb89f48b3df58c9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1698.867107] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "refresh_cache-382a5997-90bb-4bbc-b595-23c8d2f2e1f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1698.867398] env[63371]: DEBUG nova.compute.manager [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Instance network_info: |[{"id": "421d7cf6-e899-4181-9f5b-07a12ec96caf", "address": "fa:16:3e:59:a1:5e", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap421d7cf6-e8", "ovs_interfaceid": "421d7cf6-e899-4181-9f5b-07a12ec96caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1699.031995] env[63371]: DEBUG nova.compute.manager [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 
cf63c2a2-ee72-464e-944d-5e53ca8635ac] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1699.037873] env[63371]: INFO nova.compute.manager [-] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Took 1.64 seconds to deallocate network for instance. [ 1699.216312] env[63371]: DEBUG nova.network.neutron [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Successfully created port: bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1699.369299] env[63371]: DEBUG nova.compute.manager [req-ae7e7033-9b3a-4bb6-97d7-0453daa28539 req-5d1e259a-41ee-49c8-8100-d76a39fdae1d service nova] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Received event network-changed-421d7cf6-e899-4181-9f5b-07a12ec96caf {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1699.369299] env[63371]: DEBUG nova.compute.manager [req-ae7e7033-9b3a-4bb6-97d7-0453daa28539 req-5d1e259a-41ee-49c8-8100-d76a39fdae1d service nova] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Refreshing instance network info cache due to event network-changed-421d7cf6-e899-4181-9f5b-07a12ec96caf. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1699.369299] env[63371]: DEBUG oslo_concurrency.lockutils [req-ae7e7033-9b3a-4bb6-97d7-0453daa28539 req-5d1e259a-41ee-49c8-8100-d76a39fdae1d service nova] Acquiring lock "refresh_cache-382a5997-90bb-4bbc-b595-23c8d2f2e1f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1699.369299] env[63371]: DEBUG oslo_concurrency.lockutils [req-ae7e7033-9b3a-4bb6-97d7-0453daa28539 req-5d1e259a-41ee-49c8-8100-d76a39fdae1d service nova] Acquired lock "refresh_cache-382a5997-90bb-4bbc-b595-23c8d2f2e1f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1699.369299] env[63371]: DEBUG nova.network.neutron [req-ae7e7033-9b3a-4bb6-97d7-0453daa28539 req-5d1e259a-41ee-49c8-8100-d76a39fdae1d service nova] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Refreshing network info cache for port 421d7cf6-e899-4181-9f5b-07a12ec96caf {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1699.386761] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51ad43f-bbb3-4fbd-994c-b1e8d36e6135 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.398600] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d74f443-d179-44d7-b4a8-21893dc9bf0d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.433920] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e469660-de97-4a1c-9968-6f3e6f591bd3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.443147] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16671718-e69b-407c-b0b5-08453832dc20 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.462064] env[63371]: DEBUG nova.compute.provider_tree [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1699.546370] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.965955] env[63371]: DEBUG nova.scheduler.client.report [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1700.044670] env[63371]: DEBUG nova.compute.manager [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1700.107768] env[63371]: DEBUG nova.network.neutron [req-ae7e7033-9b3a-4bb6-97d7-0453daa28539 req-5d1e259a-41ee-49c8-8100-d76a39fdae1d service nova] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Updated VIF entry in instance network info cache for port 421d7cf6-e899-4181-9f5b-07a12ec96caf. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1700.108188] env[63371]: DEBUG nova.network.neutron [req-ae7e7033-9b3a-4bb6-97d7-0453daa28539 req-5d1e259a-41ee-49c8-8100-d76a39fdae1d service nova] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Updating instance_info_cache with network_info: [{"id": "421d7cf6-e899-4181-9f5b-07a12ec96caf", "address": "fa:16:3e:59:a1:5e", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap421d7cf6-e8", "ovs_interfaceid": "421d7cf6-e899-4181-9f5b-07a12ec96caf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.471388] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.456s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.471671] env[63371]: DEBUG nova.compute.manager [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1700.474480] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.845s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.474710] env[63371]: DEBUG nova.objects.instance [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Lazy-loading 'resources' on Instance uuid 33952466-3df7-4485-8e7a-ab3d6ec3f22c {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1700.610643] env[63371]: DEBUG oslo_concurrency.lockutils [req-ae7e7033-9b3a-4bb6-97d7-0453daa28539 req-5d1e259a-41ee-49c8-8100-d76a39fdae1d service nova] Releasing lock "refresh_cache-382a5997-90bb-4bbc-b595-23c8d2f2e1f0" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.611228] env[63371]: DEBUG nova.compute.manager [req-ae7e7033-9b3a-4bb6-97d7-0453daa28539 req-5d1e259a-41ee-49c8-8100-d76a39fdae1d service nova] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Received event network-vif-deleted-8e9291f7-154c-4bfa-bfd8-f09dbd9b4963 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1700.920086] env[63371]: DEBUG nova.network.neutron [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Successfully updated port: bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1700.977629] env[63371]: DEBUG nova.compute.utils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1700.983153] env[63371]: DEBUG nova.compute.manager [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Not allocating networking since 'none' was specified. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1701.291716] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0ab9c9-2e67-4239-bdf4-9eba4e759b92 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.303166] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9266d5a1-50f7-47ea-8ebe-af77319634c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.339391] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731ec4d5-227e-43a4-84d1-c2f3f5703a5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.347940] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d85476-b517-40e1-be40-84ab325f82f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.356061] env[63371]: DEBUG nova.virt.hardware [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1701.356320] env[63371]: DEBUG nova.virt.hardware [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1701.356476] env[63371]: DEBUG nova.virt.hardware [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1701.356655] env[63371]: DEBUG nova.virt.hardware [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1701.356798] env[63371]: DEBUG nova.virt.hardware [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1701.356943] env[63371]: DEBUG nova.virt.hardware [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 
tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1701.357169] env[63371]: DEBUG nova.virt.hardware [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1701.357326] env[63371]: DEBUG nova.virt.hardware [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1701.357489] env[63371]: DEBUG nova.virt.hardware [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1701.357647] env[63371]: DEBUG nova.virt.hardware [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1701.357813] env[63371]: DEBUG nova.virt.hardware [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1701.360034] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179ff1c4-130f-49f9-a408-1bdcfd0d11d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.370492] env[63371]: DEBUG nova.compute.provider_tree [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1701.378626] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158ca7fa-31c5-47ed-86e7-7e99fd76f30b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.394050] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1701.399632] env[63371]: DEBUG oslo.service.loopingcall [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1701.401959] env[63371]: DEBUG nova.virt.hardware [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1701.402248] env[63371]: DEBUG nova.virt.hardware [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1701.402426] env[63371]: DEBUG nova.virt.hardware [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1701.402623] env[63371]: DEBUG nova.virt.hardware [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1701.402774] env[63371]: DEBUG nova.virt.hardware [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1701.402923] env[63371]: DEBUG nova.virt.hardware [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1701.403196] env[63371]: DEBUG nova.virt.hardware [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1701.403309] env[63371]: DEBUG nova.virt.hardware [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1701.403473] env[63371]: DEBUG 
nova.virt.hardware [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1701.403628] env[63371]: DEBUG nova.virt.hardware [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1701.403795] env[63371]: DEBUG nova.virt.hardware [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1701.404365] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1701.406798] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53eb93de-4e75-431d-88e4-963ebb83c0fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.408884] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82b31b0f-ff8d-4351-8820-83c47727a1eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.422519] env[63371]: DEBUG nova.virt.hardware [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1701.422739] env[63371]: DEBUG nova.virt.hardware [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1701.422884] env[63371]: DEBUG nova.virt.hardware [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1701.423136] env[63371]: DEBUG nova.virt.hardware [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 
tempest-ServersTestJSON-1162814863-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1701.423236] env[63371]: DEBUG nova.virt.hardware [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1701.423359] env[63371]: DEBUG nova.virt.hardware [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1701.423686] env[63371]: DEBUG nova.virt.hardware [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1701.423849] env[63371]: DEBUG nova.virt.hardware [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1701.423849] env[63371]: DEBUG nova.virt.hardware [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1701.424019] env[63371]: DEBUG nova.virt.hardware [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1701.424362] env[63371]: DEBUG nova.virt.hardware [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1701.426450] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "refresh_cache-cf63c2a2-ee72-464e-944d-5e53ca8635ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.426582] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "refresh_cache-cf63c2a2-ee72-464e-944d-5e53ca8635ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.426720] env[63371]: DEBUG nova.network.neutron [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Building network info cache for instance {{(pid=63371) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1701.428797] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54dfc738-17dc-4fa6-ad9e-c782639df827 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.436575] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a2c801-b276-45b0-83d9-188435a91e84 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.444794] env[63371]: DEBUG oslo_vmware.rw_handles [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a526cc-6460-624d-0fbc-2cf25eeebf51/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1701.446545] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1701.446545] env[63371]: value = "task-1774517" [ 1701.446545] env[63371]: _type = "Task" [ 1701.446545] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.447357] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9097c804-51ea-49e3-bccc-0ba79d90c5f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.454482] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88441b9-d56b-48ed-a9a5-46ca14964d9c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.459427] env[63371]: DEBUG nova.compute.manager [req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Received event network-vif-plugged-bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1701.459636] env[63371]: DEBUG oslo_concurrency.lockutils [req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] Acquiring lock "cf63c2a2-ee72-464e-944d-5e53ca8635ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.459836] env[63371]: DEBUG oslo_concurrency.lockutils [req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] Lock "cf63c2a2-ee72-464e-944d-5e53ca8635ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.459993] env[63371]: DEBUG oslo_concurrency.lockutils [req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] Lock "cf63c2a2-ee72-464e-944d-5e53ca8635ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.460179] env[63371]: DEBUG nova.compute.manager 
[req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] No waiting events found dispatching network-vif-plugged-bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1701.460370] env[63371]: WARNING nova.compute.manager [req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Received unexpected event network-vif-plugged-bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f for instance with vm_state building and task_state spawning. [ 1701.460480] env[63371]: DEBUG nova.compute.manager [req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Received event network-changed-bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1701.460624] env[63371]: DEBUG nova.compute.manager [req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Refreshing instance network info cache due to event network-changed-bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1701.460783] env[63371]: DEBUG oslo_concurrency.lockutils [req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] Acquiring lock "refresh_cache-cf63c2a2-ee72-464e-944d-5e53ca8635ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.472214] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:a1:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '421d7cf6-e899-4181-9f5b-07a12ec96caf', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1701.479837] env[63371]: DEBUG oslo.service.loopingcall [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1701.482287] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1701.482780] env[63371]: DEBUG oslo_vmware.rw_handles [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a526cc-6460-624d-0fbc-2cf25eeebf51/disk-0.vmdk is in state: ready. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1701.482943] env[63371]: ERROR oslo_vmware.rw_handles [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a526cc-6460-624d-0fbc-2cf25eeebf51/disk-0.vmdk due to incomplete transfer. [ 1701.483500] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ecd7d4d2-cb96-4247-91cd-c7961b380a46 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.497998] env[63371]: DEBUG nova.compute.manager [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1701.500485] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-96755328-ec70-4dcd-8629-009e9bd1b214 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.519175] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774517, 'name': CreateVM_Task} progress is 15%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.529555] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1701.529555] env[63371]: value = "task-1774518" [ 1701.529555] env[63371]: _type = "Task" [ 1701.529555] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.539367] env[63371]: DEBUG oslo_vmware.rw_handles [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52a526cc-6460-624d-0fbc-2cf25eeebf51/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1701.539589] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Uploaded image e0968020-dc04-4588-9b55-b889aca0134c to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1701.541511] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1701.545015] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-be95100d-9da4-4ebc-8d4e-cb01d57815d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.547873] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774518, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.554270] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1701.554270] env[63371]: value = "task-1774519" [ 1701.554270] env[63371]: _type = "Task" [ 1701.554270] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.564986] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774519, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.617028] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.617257] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquired lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.617637] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Forcefully refreshing network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1701.805462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.805462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.805462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.805462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.805462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.806894] env[63371]: INFO nova.compute.manager [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Terminating instance [ 1701.811599] env[63371]: DEBUG nova.compute.manager [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1701.811599] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1701.813021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fea8d81-4b83-4d00-95d7-5300d2f2d7d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.821841] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1701.822197] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f58c4cf3-e178-48ce-a492-27fdc190b243 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.833544] env[63371]: DEBUG oslo_vmware.api [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1701.833544] env[63371]: value = "task-1774520" [ 1701.833544] env[63371]: _type = "Task" [ 1701.833544] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.843013] env[63371]: DEBUG oslo_vmware.api [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774520, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.874250] env[63371]: DEBUG nova.scheduler.client.report [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1701.965784] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774517, 'name': CreateVM_Task, 'duration_secs': 0.422707} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.965979] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1701.966525] env[63371]: DEBUG oslo_concurrency.lockutils [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1701.966625] env[63371]: DEBUG oslo_concurrency.lockutils [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1701.966968] env[63371]: DEBUG oslo_concurrency.lockutils [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1701.967247] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1eb90a4c-a4ca-497c-b8a4-707a0ad71b29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.973687] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1701.973687] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52677e99-4ca1-0e03-d8e6-1e7211a9d619" [ 1701.973687] env[63371]: _type = "Task" [ 1701.973687] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.981396] env[63371]: DEBUG nova.network.neutron [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1701.987487] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52677e99-4ca1-0e03-d8e6-1e7211a9d619, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.040381] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774518, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.065622] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774519, 'name': Destroy_Task, 'duration_secs': 0.444358} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.067689] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Destroyed the VM [ 1702.067957] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1702.068876] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fa559b2a-fa72-4088-a8ec-0ded5a390694 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.080753] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1702.080753] env[63371]: value = "task-1774521" [ 1702.080753] env[63371]: _type = "Task" [ 1702.080753] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.094364] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774521, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.160450] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526c1aea-1365-986a-b94b-2338879d59b1/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1702.161446] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2549fd-d3ac-4bb1-8555-cc7346e6994b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.170848] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526c1aea-1365-986a-b94b-2338879d59b1/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1702.171096] env[63371]: ERROR oslo_vmware.rw_handles [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526c1aea-1365-986a-b94b-2338879d59b1/disk-0.vmdk due to incomplete transfer. [ 1702.171336] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-88641d2b-d392-48a3-8d6f-80700ec6534d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.190519] env[63371]: DEBUG nova.network.neutron [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Updating instance_info_cache with network_info: [{"id": "bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f", "address": "fa:16:3e:df:dd:99", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcf2f3d2-8a", "ovs_interfaceid": "bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.193122] env[63371]: DEBUG oslo_vmware.rw_handles [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 
tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/526c1aea-1365-986a-b94b-2338879d59b1/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1702.194021] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Uploaded image 496b93e2-5142-43b5-a0fc-8e75cb31f472 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1702.196492] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1702.197597] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-3e9f9a4c-e255-4ec8-8361-b053fa369619 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.206936] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1702.206936] env[63371]: value = "task-1774522" [ 1702.206936] env[63371]: _type = "Task" [ 1702.206936] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.216563] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774522, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.259094] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "11527051-7a4f-481a-b5ed-14550c550c4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.259291] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "11527051-7a4f-481a-b5ed-14550c550c4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.344680] env[63371]: DEBUG oslo_vmware.api [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774520, 'name': PowerOffVM_Task, 'duration_secs': 0.352123} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.344955] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1702.345137] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1702.345397] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4651e557-3e60-4917-bdbf-28e1c510d01a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.380293] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.906s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.382942] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.239s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.385307] env[63371]: INFO nova.compute.claims [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1702.400478] env[63371]: INFO nova.scheduler.client.report [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Deleted allocations for instance 33952466-3df7-4485-8e7a-ab3d6ec3f22c [ 1702.485171] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52677e99-4ca1-0e03-d8e6-1e7211a9d619, 'name': SearchDatastore_Task, 'duration_secs': 0.012957} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.485492] env[63371]: DEBUG oslo_concurrency.lockutils [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.485722] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1702.485971] env[63371]: DEBUG oslo_concurrency.lockutils [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1702.486105] env[63371]: DEBUG oslo_concurrency.lockutils [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.486296] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1702.486582] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-25953c86-4c91-4078-a10b-04b0c9ad3ec9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.496698] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1702.496870] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1702.497649] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86b26f46-ed03-4c7f-824f-24519ca9b2fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.503360] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1702.503360] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5231e061-cb37-be2f-daed-5fe790e1504b" [ 1702.503360] env[63371]: _type = "Task" [ 1702.503360] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.512631] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5231e061-cb37-be2f-daed-5fe790e1504b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.526424] env[63371]: DEBUG nova.compute.manager [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1702.528542] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1702.528676] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1702.528827] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Deleting the datastore file [datastore1] 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1702.529658] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a293b1f2-f22d-45c6-a74d-7383016ea396 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.542949] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774518, 'name': CreateVM_Task, 'duration_secs': 0.51733} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.544254] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1702.544604] env[63371]: DEBUG oslo_vmware.api [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1702.544604] env[63371]: value = "task-1774524" [ 1702.544604] env[63371]: _type = "Task" [ 1702.544604] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.547012] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1702.547206] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.547515] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1702.548248] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8867384-4454-4ad8-843d-09752bd52c20 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.554924] env[63371]: DEBUG nova.virt.hardware [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1702.555171] env[63371]: DEBUG nova.virt.hardware [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 
tempest-ServersAaction247Test-1359088529-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1702.555333] env[63371]: DEBUG nova.virt.hardware [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1702.555518] env[63371]: DEBUG nova.virt.hardware [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1702.555663] env[63371]: DEBUG nova.virt.hardware [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1702.555806] env[63371]: DEBUG nova.virt.hardware [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1702.556068] env[63371]: DEBUG nova.virt.hardware [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1702.556218] env[63371]: DEBUG nova.virt.hardware [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1702.556380] env[63371]: DEBUG nova.virt.hardware [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1702.556545] env[63371]: DEBUG nova.virt.hardware [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1702.556728] env[63371]: DEBUG nova.virt.hardware [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1702.557482] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15952220-f112-4c8a-b789-31c458c61285 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.565649] env[63371]: DEBUG oslo_vmware.api [None 
req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1702.565649] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5202c281-3ae6-b2e7-4f1c-ff3a125fbc46" [ 1702.565649] env[63371]: _type = "Task" [ 1702.565649] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.569247] env[63371]: DEBUG oslo_vmware.api [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774524, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.573329] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8292c74-40aa-4033-bad4-642b429cf72b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.584592] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5202c281-3ae6-b2e7-4f1c-ff3a125fbc46, 'name': SearchDatastore_Task, 'duration_secs': 0.01036} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.596551] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.596810] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1702.597108] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1702.597503] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1702.603165] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Creating folder: Project (219aabcf4bfa4f918bbc6c38ffd5daa0). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1702.603846] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7528973b-2c93-497c-af8e-8f14abd493f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.611731] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774521, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.624291] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Created folder: Project (219aabcf4bfa4f918bbc6c38ffd5daa0) in parent group-v368199. [ 1702.624549] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Creating folder: Instances. Parent ref: group-v368438. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1702.624846] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aec0bcf3-0708-4488-bb3d-df96eb8f51ad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.636154] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Created folder: Instances in parent group-v368438. [ 1702.636951] env[63371]: DEBUG oslo.service.loopingcall [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1702.637322] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1702.637581] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4203d15b-a9d6-47c9-80aa-e7798160a547 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.656737] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1702.656737] env[63371]: value = "task-1774527" [ 1702.656737] env[63371]: _type = "Task" [ 1702.656737] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.664836] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774527, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.684044] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "f391d4f3-6e9d-4ddc-918a-8dc8581dfc00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.684306] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "f391d4f3-6e9d-4ddc-918a-8dc8581dfc00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.698466] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "refresh_cache-cf63c2a2-ee72-464e-944d-5e53ca8635ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1702.698823] env[63371]: DEBUG nova.compute.manager [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Instance network_info: |[{"id": "bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f", "address": "fa:16:3e:df:dd:99", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcf2f3d2-8a", "ovs_interfaceid": "bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1702.699486] env[63371]: DEBUG oslo_concurrency.lockutils [req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] Acquired lock "refresh_cache-cf63c2a2-ee72-464e-944d-5e53ca8635ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1702.699690] env[63371]: DEBUG nova.network.neutron [req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Refreshing network info cache for port bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f 
{{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1702.701077] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:dd:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1702.708878] env[63371]: DEBUG oslo.service.loopingcall [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1702.713967] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1702.717279] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78d0a801-d8ef-4b19-aff2-c74a9ddcb581 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.739739] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774522, 'name': Destroy_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.741119] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1702.741119] env[63371]: value = "task-1774528" [ 1702.741119] env[63371]: _type = "Task" [ 1702.741119] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.752063] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774528, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.761803] env[63371]: DEBUG nova.compute.manager [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1702.910439] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5b382968-5921-4c70-a75e-417a6cee5e27 tempest-ServerShowV254Test-2026313346 tempest-ServerShowV254Test-2026313346-project-member] Lock "33952466-3df7-4485-8e7a-ab3d6ec3f22c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.079s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.945354] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updating instance_info_cache with network_info: [{"id": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "address": "fa:16:3e:b7:8f:81", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb03ddfde-3b", "ovs_interfaceid": "b03ddfde-3b36-43a8-8c6a-00cd704bce22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.020777] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5231e061-cb37-be2f-daed-5fe790e1504b, 'name': SearchDatastore_Task, 'duration_secs': 0.012261} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.021746] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbbbace7-1b43-4c38-9c86-8bf3a4138a3c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.033109] env[63371]: DEBUG nova.network.neutron [req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Updated VIF entry in instance network info cache for port bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1703.033515] env[63371]: DEBUG nova.network.neutron [req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Updating instance_info_cache with network_info: [{"id": "bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f", "address": "fa:16:3e:df:dd:99", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcf2f3d2-8a", "ovs_interfaceid": "bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.035028] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "7349ecf6-2de7-4540-b713-7e29cbd3ff0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.035277] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "7349ecf6-2de7-4540-b713-7e29cbd3ff0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1703.036422] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1703.036422] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5259e3f1-3cc5-4ad7-9a0c-6dd49ddd5d1e" [ 1703.036422] env[63371]: _type = "Task" [ 1703.036422] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.052128] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5259e3f1-3cc5-4ad7-9a0c-6dd49ddd5d1e, 'name': SearchDatastore_Task, 'duration_secs': 0.01241} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.060854] env[63371]: DEBUG oslo_concurrency.lockutils [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1703.061433] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3da99cec-409f-4ea0-891c-2e9d7429674d/3da99cec-409f-4ea0-891c-2e9d7429674d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1703.062913] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.063404] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1703.063772] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9077ef68-b47f-4404-935b-350d89c86bca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.068934] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a1f121c0-ed29-40f3-936c-6d430c986efc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.079791] env[63371]: DEBUG oslo_vmware.api [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774524, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143549} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.081176] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1703.081512] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1703.081822] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1703.082191] env[63371]: INFO nova.compute.manager [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1703.082630] env[63371]: DEBUG oslo.service.loopingcall [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1703.082975] env[63371]: DEBUG nova.compute.manager [-] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1703.083176] env[63371]: DEBUG nova.network.neutron [-] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1703.088885] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1703.088885] env[63371]: value = "task-1774529" [ 1703.088885] env[63371]: _type = "Task" [ 1703.088885] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.103442] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1703.103811] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1703.106365] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31003431-a51e-4604-8728-f499533e688a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.117711] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774521, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.122534] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1703.122534] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52eccfbf-34b1-bd3d-0c6f-d54ceaf34d4c" [ 1703.122534] env[63371]: _type = "Task" [ 1703.122534] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.128475] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774529, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.149029] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52eccfbf-34b1-bd3d-0c6f-d54ceaf34d4c, 'name': SearchDatastore_Task, 'duration_secs': 0.01639} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.150453] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22a74b01-b583-478a-bc5c-17ce40b73122 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.161212] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1703.161212] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a993ad-c680-2162-7908-77c1eea36335" [ 1703.161212] env[63371]: _type = "Task" [ 1703.161212] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.178069] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774527, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.181368] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a993ad-c680-2162-7908-77c1eea36335, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.186788] env[63371]: DEBUG nova.compute.manager [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1703.223517] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774522, 'name': Destroy_Task, 'duration_secs': 0.920099} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.223876] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Destroyed the VM [ 1703.224018] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1703.224286] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-803a2f87-4040-4ed6-8681-1337c7686dc4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.233746] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1703.233746] env[63371]: value = "task-1774530" [ 1703.233746] env[63371]: _type = "Task" [ 1703.233746] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.244485] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774530, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.254654] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774528, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.298381] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.448631] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Releasing lock "refresh_cache-44cc8606-24f5-4f6b-b96f-3559c9c3f06e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1703.448935] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updated the network info_cache for instance {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1703.449124] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.449299] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.450047] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.450442] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.450787] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.451008] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.451228] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1703.451430] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1703.537979] env[63371]: DEBUG oslo_concurrency.lockutils [req-41a5f927-60d7-4b5c-a421-a2de6fa9ca78 req-30c591ed-c9bb-475c-b42b-e271bf4c01d3 service nova] Releasing lock "refresh_cache-cf63c2a2-ee72-464e-944d-5e53ca8635ac" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1703.540943] env[63371]: DEBUG nova.compute.manager [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1703.610645] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774521, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.619873] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774529, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.681416] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774527, 'name': CreateVM_Task, 'duration_secs': 0.635634} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.681668] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1703.681939] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.682195] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.682579] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1703.682987] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bdd25a6-862f-453b-a686-7304b6c3090e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.689742] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a993ad-c680-2162-7908-77c1eea36335, 'name': SearchDatastore_Task, 'duration_secs': 0.017802} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.692980] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1703.693740] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 382a5997-90bb-4bbc-b595-23c8d2f2e1f0/382a5997-90bb-4bbc-b595-23c8d2f2e1f0.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1703.696119] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8154c32-2c60-4065-80c2-435a91cc4549 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.702469] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Waiting for the task: (returnval){ [ 1703.702469] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52598bb9-1f57-7b94-f1f3-c73b4e9257ff" [ 1703.702469] env[63371]: _type = "Task" [ 1703.702469] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.711426] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1703.711426] env[63371]: value = "task-1774531" [ 1703.711426] env[63371]: _type = "Task" [ 1703.711426] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.719730] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52598bb9-1f57-7b94-f1f3-c73b4e9257ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.719730] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.728057] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774531, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.750107] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774530, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.759491] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774528, 'name': CreateVM_Task, 'duration_secs': 0.519468} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.762845] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1703.763506] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.806264] env[63371]: DEBUG nova.compute.manager [req-b56d023d-bfeb-4c63-8f7f-d550de83e301 req-92ec4c3c-b6d4-4a73-8d10-9c6336944889 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Received event network-vif-deleted-cf8050ea-381c-487b-9981-c3f042d673e1 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1703.806496] env[63371]: INFO nova.compute.manager [req-b56d023d-bfeb-4c63-8f7f-d550de83e301 req-92ec4c3c-b6d4-4a73-8d10-9c6336944889 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Neutron deleted interface cf8050ea-381c-487b-9981-c3f042d673e1; detaching it from the instance and deleting it from the info cache [ 1703.807308] env[63371]: DEBUG nova.network.neutron [req-b56d023d-bfeb-4c63-8f7f-d550de83e301 req-92ec4c3c-b6d4-4a73-8d10-9c6336944889 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1703.864515] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7f70b3-a87d-4d3f-b055-666fab8eadd3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.876098] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8799626-be3b-4fe5-9a76-43ed3155713b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.921662] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1803547-0a7d-497c-8ad3-a3d0d5a8d485 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.930039] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96019174-58c9-4e96-b841-e995d7dba077 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.946916] env[63371]: DEBUG nova.compute.provider_tree [None 
req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1703.954757] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.071954] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.100809] env[63371]: DEBUG oslo_vmware.api [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774521, 'name': RemoveSnapshot_Task, 'duration_secs': 1.598456} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.105304] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1704.106293] env[63371]: INFO nova.compute.manager [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Took 16.11 seconds to snapshot the instance on the hypervisor. [ 1704.116775] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774529, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.666311} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.117110] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3da99cec-409f-4ea0-891c-2e9d7429674d/3da99cec-409f-4ea0-891c-2e9d7429674d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1704.117298] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1704.117607] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e7fd902d-b795-4611-ab2f-1a16e594135b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.128838] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1704.128838] env[63371]: value = "task-1774532" [ 1704.128838] env[63371]: _type = "Task" [ 1704.128838] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.139848] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774532, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.221602] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52598bb9-1f57-7b94-f1f3-c73b4e9257ff, 'name': SearchDatastore_Task, 'duration_secs': 0.028148} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.222427] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.222713] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1704.223010] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.224437] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.224437] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1704.224437] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.224437] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1704.224437] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-72ac8466-a65d-4153-9869-463a51c12728 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.230370] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f682707-f0f3-40c6-aa59-c2114f71016d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.231601] env[63371]: DEBUG oslo_vmware.api [None 
req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774531, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.239430] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1704.239430] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523932cb-7315-d953-ec20-7866094faec3" [ 1704.239430] env[63371]: _type = "Task" [ 1704.239430] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.247027] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774530, 'name': RemoveSnapshot_Task, 'duration_secs': 0.740297} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.251124] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1704.251437] env[63371]: DEBUG nova.compute.manager [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1704.252355] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523932cb-7315-d953-ec20-7866094faec3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.252541] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1704.252726] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1704.254267] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f93821-b7ee-46eb-8550-d9ede393e21e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.256499] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89e25be8-3ba4-48db-a6d3-711214491a52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.270948] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Waiting for the task: (returnval){ [ 1704.270948] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52beb3fc-8781-b7b2-993f-531a013c522a" [ 1704.270948] env[63371]: _type = "Task" [ 1704.270948] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.276274] env[63371]: DEBUG nova.network.neutron [-] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1704.281163] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52beb3fc-8781-b7b2-993f-531a013c522a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.312483] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b9b052d-d8c4-4384-8e5a-86303010143e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.324078] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952f9774-e617-4976-a69c-d99d78e37270 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.360972] env[63371]: DEBUG nova.compute.manager [req-b56d023d-bfeb-4c63-8f7f-d550de83e301 req-92ec4c3c-b6d4-4a73-8d10-9c6336944889 service nova] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Detach interface failed, port_id=cf8050ea-381c-487b-9981-c3f042d673e1, reason: Instance 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1704.450557] env[63371]: DEBUG nova.scheduler.client.report [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1704.647374] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774532, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100339} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.649539] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1704.650014] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aefcb91e-1c9d-4378-b2d3-dcee96a0ea7a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.676086] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 3da99cec-409f-4ea0-891c-2e9d7429674d/3da99cec-409f-4ea0-891c-2e9d7429674d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1704.678536] env[63371]: DEBUG nova.compute.manager [None req-1359f873-5613-4f93-af80-fc151cc0a1bd tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Found 2 images (rotation: 2) {{(pid=63371) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1704.679587] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad4b6436-66d5-4c8e-a03e-abdfe901bc20 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.701592] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1704.701592] env[63371]: value = "task-1774533" [ 1704.701592] env[63371]: _type = "Task" [ 1704.701592] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.710799] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774533, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.723747] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774531, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602046} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.724149] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 382a5997-90bb-4bbc-b595-23c8d2f2e1f0/382a5997-90bb-4bbc-b595-23c8d2f2e1f0.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1704.724396] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1704.724708] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-552e1aac-21b3-48fb-9a98-e7e18d6b4b47 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.733341] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1704.733341] env[63371]: value = "task-1774534" [ 1704.733341] env[63371]: _type = "Task" [ 1704.733341] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.744143] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774534, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.753480] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523932cb-7315-d953-ec20-7866094faec3, 'name': SearchDatastore_Task, 'duration_secs': 0.05588} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.753783] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.754137] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1704.754218] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.778390] env[63371]: INFO nova.compute.manager [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Shelve offloading [ 1704.780574] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1704.780824] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26e1c5e2-71e7-4098-bf10-16204a9053de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.782642] env[63371]: INFO nova.compute.manager [-] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Took 1.70 seconds to deallocate network for instance. [ 1704.788507] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52beb3fc-8781-b7b2-993f-531a013c522a, 'name': SearchDatastore_Task, 'duration_secs': 0.032371} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.794091] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79143d15-9004-4000-8752-de9ba7a0bc7f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.796548] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1704.796548] env[63371]: value = "task-1774535" [ 1704.796548] env[63371]: _type = "Task" [ 1704.796548] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.802089] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Waiting for the task: (returnval){ [ 1704.802089] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d79854-b94b-9f95-c8a9-3201788518c5" [ 1704.802089] env[63371]: _type = "Task" [ 1704.802089] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.810368] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1704.810368] env[63371]: DEBUG nova.compute.manager [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1704.811053] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1c621a-50b9-47e6-9a57-785cb1e660f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.817218] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d79854-b94b-9f95-c8a9-3201788518c5, 'name': SearchDatastore_Task, 'duration_secs': 0.010858} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1704.817876] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1704.818235] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 6f31d6ad-480d-40dd-924e-f6277d93c99a/6f31d6ad-480d-40dd-924e-f6277d93c99a.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1704.818486] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.818674] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1704.818949] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-535d1a7a-aff0-436b-8ade-34bd69e5d189 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.822271] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-486531de-f8b9-421a-8f60-445c616ecf8d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.824347] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1704.825452] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1704.825452] env[63371]: DEBUG nova.network.neutron [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1704.832813] env[63371]: DEBUG 
oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Waiting for the task: (returnval){ [ 1704.832813] env[63371]: value = "task-1774536" [ 1704.832813] env[63371]: _type = "Task" [ 1704.832813] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.838349] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1704.838534] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1704.839615] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-996d4b4e-a6a7-486a-827b-bb67a2d90257 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.845368] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774536, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.848907] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1704.848907] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5248b1f7-9c81-9bb2-d77d-58c6d0dbc7f7" [ 1704.848907] env[63371]: _type = "Task" [ 1704.848907] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.858908] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5248b1f7-9c81-9bb2-d77d-58c6d0dbc7f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.956495] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.957079] env[63371]: DEBUG nova.compute.manager [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1704.960013] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.837s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.960705] env[63371]: DEBUG nova.objects.instance [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lazy-loading 'resources' on Instance uuid e781866e-9b26-47c7-b1a6-d6d9547bf2fd {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1705.212815] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774533, 'name': ReconfigVM_Task, 'duration_secs': 0.340928} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.212964] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 3da99cec-409f-4ea0-891c-2e9d7429674d/3da99cec-409f-4ea0-891c-2e9d7429674d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1705.213692] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4b40344-1569-46fe-aaed-fada751717ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.225201] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1705.225201] env[63371]: value = "task-1774537" [ 1705.225201] env[63371]: _type = "Task" [ 1705.225201] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.235044] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774537, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.245698] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774534, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073239} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.245996] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1705.246972] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f144840d-4002-42e7-b4ed-48ce7e575d00 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.272968] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 382a5997-90bb-4bbc-b595-23c8d2f2e1f0/382a5997-90bb-4bbc-b595-23c8d2f2e1f0.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1705.273384] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d5ae576-a7ce-4cee-a930-405c72b8a042 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.296570] env[63371]: DEBUG oslo_concurrency.lockutils [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1705.296939] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1705.296939] env[63371]: value = "task-1774538" [ 1705.296939] env[63371]: _type = "Task" [ 1705.296939] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.308568] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774538, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.346852] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774536, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.361254] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5248b1f7-9c81-9bb2-d77d-58c6d0dbc7f7, 'name': SearchDatastore_Task, 'duration_secs': 0.011465} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.364580] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed7a58ac-3174-4d53-9737-31a7b589c5e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.371508] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1705.371508] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526c0c2f-4f42-d188-1339-f38cead0e60c" [ 1705.371508] env[63371]: _type = "Task" [ 1705.371508] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.380845] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526c0c2f-4f42-d188-1339-f38cead0e60c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.466081] env[63371]: DEBUG nova.compute.utils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1705.467759] env[63371]: DEBUG nova.compute.manager [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1705.468039] env[63371]: DEBUG nova.network.neutron [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1705.514648] env[63371]: DEBUG nova.policy [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1b2f698ebd747d6a84ac3f3e05e97b3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a5b81b233f640b186d9798ff57a4945', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1705.556080] env[63371]: DEBUG nova.network.neutron [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Updating instance_info_cache with network_info: [{"id": "d3f41a80-52de-46a5-ac15-9a26e6710908", "address": "fa:16:3e:f6:cd:6b", "network": {"id": "9c25e5e9-468d-4d4c-93e0-c9815eff1c2e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-814005109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e693d73d70140c2ba065de2b60838c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3f41a80-52", "ovs_interfaceid": "d3f41a80-52de-46a5-ac15-9a26e6710908", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1705.741304] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774537, 'name': Rename_Task, 'duration_secs': 0.30704} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.741679] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1705.742283] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b19c6be-0269-42fc-8dcf-a7b7b55f109d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.750587] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1705.750587] env[63371]: value = "task-1774539" [ 1705.750587] env[63371]: _type = "Task" [ 1705.750587] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.766851] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774539, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.812998] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774538, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.844818] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774536, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.559816} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.846395] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 6f31d6ad-480d-40dd-924e-f6277d93c99a/6f31d6ad-480d-40dd-924e-f6277d93c99a.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1705.846395] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1705.846395] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-838fdf48-9a30-404b-8676-1a85b31bf98b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.850310] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f93475c-0fae-45cf-8ef3-6d7af93b2dc4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.859467] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d50939e-1b9a-436b-8905-4afcd93d1725 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.863665] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Waiting for the task: (returnval){ [ 1705.863665] env[63371]: value = "task-1774540" [ 1705.863665] env[63371]: _type = "Task" [ 1705.863665] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.864844] env[63371]: DEBUG nova.network.neutron [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Successfully created port: 0fec120d-e875-4254-bf67-1c749227262a {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1705.905616] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd0acf1-16e0-4b51-90f0-1e47141c8b3a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.907877] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774540, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.915205] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526c0c2f-4f42-d188-1339-f38cead0e60c, 'name': SearchDatastore_Task, 'duration_secs': 0.011363} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.917287] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.917552] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cf63c2a2-ee72-464e-944d-5e53ca8635ac/cf63c2a2-ee72-464e-944d-5e53ca8635ac.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1705.917850] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1146265d-aac8-4178-bb57-77c060dc085f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.920692] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ea8be4-dba8-43f6-9fb6-9058132c8acb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.935385] env[63371]: DEBUG nova.compute.provider_tree [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1705.937788] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1705.937788] env[63371]: value = "task-1774541" [ 1705.937788] env[63371]: _type = "Task" [ 1705.937788] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.947447] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774541, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.970787] env[63371]: DEBUG nova.compute.manager [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1706.060700] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Releasing lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.094871] env[63371]: DEBUG nova.compute.manager [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1706.095786] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9280d58e-5da8-4ef1-a28e-a516da444246 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.265306] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774539, 'name': PowerOnVM_Task} progress is 71%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.312013] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774538, 'name': ReconfigVM_Task, 'duration_secs': 0.605875} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.312431] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 382a5997-90bb-4bbc-b595-23c8d2f2e1f0/382a5997-90bb-4bbc-b595-23c8d2f2e1f0.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1706.313275] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58fad1c2-3260-426d-a33c-62fc1bb15938 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.324345] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1706.324345] env[63371]: value = "task-1774542" [ 1706.324345] env[63371]: _type = "Task" [ 1706.324345] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.336747] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774542, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.343437] env[63371]: DEBUG nova.compute.manager [req-40b0e0ac-0d33-4558-a467-763e6e30f454 req-67429726-2843-4c15-9fbd-89cf6967229b service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Received event network-vif-unplugged-d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1706.343659] env[63371]: DEBUG oslo_concurrency.lockutils [req-40b0e0ac-0d33-4558-a467-763e6e30f454 req-67429726-2843-4c15-9fbd-89cf6967229b service nova] Acquiring lock "9985dbcd-4498-4629-aae5-5e1933307c50-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.343867] env[63371]: DEBUG oslo_concurrency.lockutils [req-40b0e0ac-0d33-4558-a467-763e6e30f454 req-67429726-2843-4c15-9fbd-89cf6967229b service nova] Lock "9985dbcd-4498-4629-aae5-5e1933307c50-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.344041] env[63371]: DEBUG oslo_concurrency.lockutils [req-40b0e0ac-0d33-4558-a467-763e6e30f454 req-67429726-2843-4c15-9fbd-89cf6967229b service nova] Lock "9985dbcd-4498-4629-aae5-5e1933307c50-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.344212] env[63371]: DEBUG nova.compute.manager [req-40b0e0ac-0d33-4558-a467-763e6e30f454 req-67429726-2843-4c15-9fbd-89cf6967229b service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] No waiting events found dispatching network-vif-unplugged-d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1706.344381] env[63371]: WARNING nova.compute.manager [req-40b0e0ac-0d33-4558-a467-763e6e30f454 req-67429726-2843-4c15-9fbd-89cf6967229b service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Received unexpected event network-vif-unplugged-d3f41a80-52de-46a5-ac15-9a26e6710908 for instance with vm_state shelved and task_state shelving_offloading. 
[ 1706.354851] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1706.355744] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ca76e3-8ed1-4d3a-906d-f5307e8365c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.365193] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1706.365650] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bcfd3561-363d-4463-b3f1-a173d499ef34 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.378782] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774540, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071427} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.379117] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1706.380013] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02151ef4-3726-4cd3-8f11-939d399a518d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.404357] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 6f31d6ad-480d-40dd-924e-f6277d93c99a/6f31d6ad-480d-40dd-924e-f6277d93c99a.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1706.404808] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b26d78c6-2513-4546-bf7a-6777ec35b32a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.426689] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Waiting for the task: (returnval){ [ 1706.426689] env[63371]: value = "task-1774544" [ 1706.426689] env[63371]: _type = "Task" [ 1706.426689] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.437294] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774544, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.439459] env[63371]: DEBUG nova.scheduler.client.report [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1706.460709] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774541, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.479591] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1706.479984] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1706.479984] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Deleting the datastore file [datastore1] 9985dbcd-4498-4629-aae5-5e1933307c50 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1706.480817] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9170b051-5fcf-4912-8a3a-713a92396bb8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.488258] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1706.488258] env[63371]: value = "task-1774545" [ 1706.488258] env[63371]: _type = "Task" [ 1706.488258] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.498090] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774545, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.608099] env[63371]: INFO nova.compute.manager [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] instance snapshotting [ 1706.608729] env[63371]: DEBUG nova.objects.instance [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'flavor' on Instance uuid 9862b0f0-ccf6-4e69-9e78-cf864adaa65e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1706.767536] env[63371]: DEBUG oslo_vmware.api [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774539, 'name': PowerOnVM_Task, 'duration_secs': 0.951425} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.767951] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1706.768734] env[63371]: DEBUG nova.compute.manager [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1706.769670] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80df5b98-57e1-488e-b5fd-4af58fac1ada {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.838767] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774542, 'name': Rename_Task, 'duration_secs': 0.438536} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.839401] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1706.839401] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-00c4b135-9def-481b-a5b2-faf20a7b8409 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.846902] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1706.846902] env[63371]: value = "task-1774546" [ 1706.846902] env[63371]: _type = "Task" [ 1706.846902] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.858283] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774546, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.937987] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774544, 'name': ReconfigVM_Task, 'duration_secs': 0.349354} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.938392] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 6f31d6ad-480d-40dd-924e-f6277d93c99a/6f31d6ad-480d-40dd-924e-f6277d93c99a.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1706.939051] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ae95ffe0-f226-43cd-b8c1-7e42b091eaac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.948671] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Waiting for the task: (returnval){ [ 1706.948671] env[63371]: value = "task-1774547" [ 1706.948671] env[63371]: _type = "Task" [ 1706.948671] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.955677] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.996s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.957546] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.927946} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.958045] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.798s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.959535] env[63371]: INFO nova.compute.claims [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1706.963223] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cf63c2a2-ee72-464e-944d-5e53ca8635ac/cf63c2a2-ee72-464e-944d-5e53ca8635ac.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1706.963462] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1706.963948] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ce1a867-a9b8-474b-a59d-e6a48ab2c9dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.970787] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774547, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.977465] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1706.977465] env[63371]: value = "task-1774548" [ 1706.977465] env[63371]: _type = "Task" [ 1706.977465] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.981658] env[63371]: INFO nova.scheduler.client.report [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleted allocations for instance e781866e-9b26-47c7-b1a6-d6d9547bf2fd [ 1706.983612] env[63371]: DEBUG nova.compute.manager [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1706.997210] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774548, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.003501] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774545, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.013310] env[63371]: DEBUG nova.virt.hardware [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1707.013574] env[63371]: DEBUG nova.virt.hardware [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1707.013740] env[63371]: DEBUG nova.virt.hardware [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1707.013969] env[63371]: DEBUG nova.virt.hardware [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1707.014137] env[63371]: DEBUG nova.virt.hardware [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 
tempest-ImagesTestJSON-1893767495-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1707.014284] env[63371]: DEBUG nova.virt.hardware [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1707.014498] env[63371]: DEBUG nova.virt.hardware [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1707.014650] env[63371]: DEBUG nova.virt.hardware [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1707.014833] env[63371]: DEBUG nova.virt.hardware [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1707.015012] env[63371]: DEBUG nova.virt.hardware [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1707.015206] env[63371]: DEBUG nova.virt.hardware [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1707.016404] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da7bbbd-4cba-4fb1-94e9-ea250e6a7e9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.027075] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c65690a-d5d8-4c28-bb97-385a5d7c4f4e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.114750] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8f858ca-e58c-4268-8266-f069f471b397 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.134443] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe482a7d-8632-4a06-9c6e-2a96b53ddb27 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.287821] env[63371]: DEBUG oslo_concurrency.lockutils [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.323092] env[63371]: DEBUG nova.compute.manager [req-297001c9-e51c-4025-af9f-ab378f1358fe req-5865a1b1-8d8e-49e2-a73f-963535d9122f service nova] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Received event network-vif-plugged-0fec120d-e875-4254-bf67-1c749227262a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1707.323092] env[63371]: DEBUG oslo_concurrency.lockutils [req-297001c9-e51c-4025-af9f-ab378f1358fe req-5865a1b1-8d8e-49e2-a73f-963535d9122f service nova] Acquiring lock "935cf583-ecde-4a10-a773-6ff765e5bb49-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.323092] env[63371]: DEBUG oslo_concurrency.lockutils [req-297001c9-e51c-4025-af9f-ab378f1358fe req-5865a1b1-8d8e-49e2-a73f-963535d9122f service nova] Lock "935cf583-ecde-4a10-a773-6ff765e5bb49-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.323092] env[63371]: DEBUG oslo_concurrency.lockutils [req-297001c9-e51c-4025-af9f-ab378f1358fe req-5865a1b1-8d8e-49e2-a73f-963535d9122f service nova] Lock "935cf583-ecde-4a10-a773-6ff765e5bb49-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.323092] env[63371]: DEBUG nova.compute.manager [req-297001c9-e51c-4025-af9f-ab378f1358fe req-5865a1b1-8d8e-49e2-a73f-963535d9122f service nova] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] No waiting events found dispatching network-vif-plugged-0fec120d-e875-4254-bf67-1c749227262a {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1707.323092] env[63371]: WARNING nova.compute.manager [req-297001c9-e51c-4025-af9f-ab378f1358fe req-5865a1b1-8d8e-49e2-a73f-963535d9122f service nova] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Received unexpected event network-vif-plugged-0fec120d-e875-4254-bf67-1c749227262a for instance with vm_state building and task_state spawning. [ 1707.357668] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774546, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.413210] env[63371]: DEBUG nova.network.neutron [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Successfully updated port: 0fec120d-e875-4254-bf67-1c749227262a {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1707.461553] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774547, 'name': Rename_Task, 'duration_secs': 0.148837} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.461829] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1707.462122] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a90e65e1-f3d5-4e9f-b79c-3dea8e7e14f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.472639] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Waiting for the task: (returnval){ [ 1707.472639] env[63371]: value = "task-1774549" [ 1707.472639] env[63371]: _type = "Task" [ 1707.472639] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.487720] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774549, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.500577] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774548, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072213} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.501245] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1707.501752] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c9c05a98-92b5-4814-a4e2-84467123ba97 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "e781866e-9b26-47c7-b1a6-d6d9547bf2fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.927s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.503200] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c3ed08-d12d-4801-ab03-9c9e3148eac1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.509245] env[63371]: DEBUG oslo_vmware.api [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774545, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.605523} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.509906] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1707.510209] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1707.510333] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1707.530830] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] cf63c2a2-ee72-464e-944d-5e53ca8635ac/cf63c2a2-ee72-464e-944d-5e53ca8635ac.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1707.532784] env[63371]: INFO nova.scheduler.client.report [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Deleted allocations for instance 9985dbcd-4498-4629-aae5-5e1933307c50 [ 1707.535377] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7a341f2-ffcd-4328-841a-39f6673be171 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.557979] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1707.557979] env[63371]: value = "task-1774550" [ 1707.557979] env[63371]: _type = "Task" [ 1707.557979] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.568818] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774550, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.645329] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1707.645885] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9162a03c-aee5-4674-9d0c-cdbda06f02e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.651068] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquiring lock "3da99cec-409f-4ea0-891c-2e9d7429674d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.651389] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Lock "3da99cec-409f-4ea0-891c-2e9d7429674d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.651641] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquiring lock "3da99cec-409f-4ea0-891c-2e9d7429674d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.651870] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Lock "3da99cec-409f-4ea0-891c-2e9d7429674d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1707.652156] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Lock "3da99cec-409f-4ea0-891c-2e9d7429674d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1707.655525] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1707.655525] env[63371]: value = "task-1774551" [ 1707.655525] env[63371]: _type = "Task" [ 1707.655525] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.656100] env[63371]: INFO nova.compute.manager [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Terminating instance [ 1707.662330] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquiring lock "refresh_cache-3da99cec-409f-4ea0-891c-2e9d7429674d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.662583] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquired lock "refresh_cache-3da99cec-409f-4ea0-891c-2e9d7429674d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.662821] env[63371]: DEBUG nova.network.neutron [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1707.669754] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774551, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.858164] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774546, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.917260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "refresh_cache-935cf583-ecde-4a10-a773-6ff765e5bb49" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1707.917648] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "refresh_cache-935cf583-ecde-4a10-a773-6ff765e5bb49" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1707.917648] env[63371]: DEBUG nova.network.neutron [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1707.983046] env[63371]: DEBUG oslo_vmware.api [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774549, 'name': PowerOnVM_Task, 'duration_secs': 0.464447} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.983381] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1707.983616] env[63371]: INFO nova.compute.manager [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Took 5.46 seconds to spawn the instance on the hypervisor. 
[ 1707.983813] env[63371]: DEBUG nova.compute.manager [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1707.984700] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d175e51-d8e7-4d0a-b1b3-680d8684418b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.052921] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.070943] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774550, 'name': ReconfigVM_Task, 'duration_secs': 0.324779} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.073743] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Reconfigured VM instance instance-00000056 to attach disk [datastore1] cf63c2a2-ee72-464e-944d-5e53ca8635ac/cf63c2a2-ee72-464e-944d-5e53ca8635ac.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1708.074673] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f2c9e18-e009-42c3-9dc7-d2652f3957f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.081885] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1708.081885] env[63371]: value = "task-1774552" [ 1708.081885] env[63371]: _type = "Task" [ 1708.081885] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.095661] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774552, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.170288] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774551, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.186878] env[63371]: DEBUG nova.network.neutron [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1708.244755] env[63371]: DEBUG nova.network.neutron [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.273517] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50faecd6-a69d-4935-8178-7a7e8b5f3e0b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.283348] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a6d0a1-3cd9-46ec-a6e0-1e3387972a8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.316500] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b189a1d5-4a61-4589-9dae-7e8d458e7e05 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.329680] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9350d6dd-f1cd-4e10-87ac-e8f06bc1b1cf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.350435] env[63371]: DEBUG nova.compute.provider_tree [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1708.363505] env[63371]: DEBUG oslo_vmware.api [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774546, 'name': PowerOnVM_Task, 'duration_secs': 1.101451} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.363756] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1708.363959] env[63371]: INFO nova.compute.manager [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Took 11.94 seconds to spawn the instance on the hypervisor. 
[ 1708.364148] env[63371]: DEBUG nova.compute.manager [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1708.364923] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2dd2933-f606-4271-8110-f09fd28baac5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.462615] env[63371]: DEBUG nova.network.neutron [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1708.469870] env[63371]: DEBUG nova.compute.manager [req-3eb88705-dcb9-4973-a9f8-83643b191f76 req-0d99f1cf-50cd-4d92-a7f1-7f00fd37e910 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Received event network-changed-d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1708.470094] env[63371]: DEBUG nova.compute.manager [req-3eb88705-dcb9-4973-a9f8-83643b191f76 req-0d99f1cf-50cd-4d92-a7f1-7f00fd37e910 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Refreshing instance network info cache due to event network-changed-d3f41a80-52de-46a5-ac15-9a26e6710908. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1708.470379] env[63371]: DEBUG oslo_concurrency.lockutils [req-3eb88705-dcb9-4973-a9f8-83643b191f76 req-0d99f1cf-50cd-4d92-a7f1-7f00fd37e910 service nova] Acquiring lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1708.470543] env[63371]: DEBUG oslo_concurrency.lockutils [req-3eb88705-dcb9-4973-a9f8-83643b191f76 req-0d99f1cf-50cd-4d92-a7f1-7f00fd37e910 service nova] Acquired lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1708.470707] env[63371]: DEBUG nova.network.neutron [req-3eb88705-dcb9-4973-a9f8-83643b191f76 req-0d99f1cf-50cd-4d92-a7f1-7f00fd37e910 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Refreshing network info cache for port d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1708.503773] env[63371]: INFO nova.compute.manager [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Took 25.02 seconds to build instance. [ 1708.593512] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774552, 'name': Rename_Task, 'duration_secs': 0.244124} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.593798] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1708.594061] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ba50dd2-6528-4120-99ab-cc2f80b457d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.604018] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1708.604018] env[63371]: value = "task-1774553" [ 1708.604018] env[63371]: _type = "Task" [ 1708.604018] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.608809] env[63371]: DEBUG nova.network.neutron [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Updating instance_info_cache with network_info: [{"id": "0fec120d-e875-4254-bf67-1c749227262a", "address": "fa:16:3e:1e:3d:e6", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fec120d-e8", "ovs_interfaceid": "0fec120d-e875-4254-bf67-1c749227262a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1708.616598] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774553, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.668790] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774551, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.748183] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Releasing lock "refresh_cache-3da99cec-409f-4ea0-891c-2e9d7429674d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1708.748672] env[63371]: DEBUG nova.compute.manager [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1708.748921] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1708.750520] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9dd9ee-7c1a-4069-a567-78887705c1f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.761265] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1708.761608] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-19bdc31f-0d00-47d9-808e-92c479646167 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.769606] env[63371]: DEBUG oslo_vmware.api [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1708.769606] env[63371]: value = "task-1774554" [ 1708.769606] env[63371]: _type = "Task" [ 1708.769606] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.780701] env[63371]: DEBUG oslo_vmware.api [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774554, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.858205] env[63371]: DEBUG nova.scheduler.client.report [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1708.894160] env[63371]: INFO nova.compute.manager [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Took 29.79 seconds to build instance. [ 1709.008075] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4c2126e4-8749-4b64-9914-d6cf34f27824 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Lock "6f31d6ad-480d-40dd-924e-f6277d93c99a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.530s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.113858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "refresh_cache-935cf583-ecde-4a10-a773-6ff765e5bb49" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1709.115147] env[63371]: DEBUG nova.compute.manager [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Instance network_info: |[{"id": "0fec120d-e875-4254-bf67-1c749227262a", "address": "fa:16:3e:1e:3d:e6", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fec120d-e8", "ovs_interfaceid": "0fec120d-e875-4254-bf67-1c749227262a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1709.116466] env[63371]: DEBUG oslo_vmware.api 
[None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774553, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.116836] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:3d:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0fec120d-e875-4254-bf67-1c749227262a', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1709.127465] env[63371]: DEBUG oslo.service.loopingcall [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1709.128319] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1709.128548] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-487b9f58-1c70-47a2-b121-752c592706ab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.154808] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1709.154808] env[63371]: value = "task-1774555" [ 1709.154808] env[63371]: _type = "Task" [ 1709.154808] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.167769] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774555, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.175206] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774551, 'name': CreateSnapshot_Task, 'duration_secs': 1.129901} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.175865] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1709.176686] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafd77ea-3a69-416a-8455-00a04e8f9fb5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.280406] env[63371]: DEBUG oslo_vmware.api [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774554, 'name': PowerOffVM_Task, 'duration_secs': 0.223745} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.280692] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1709.280854] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1709.281134] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cb44c0ec-fab3-407b-855c-1414da318c9c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.283930] env[63371]: DEBUG nova.network.neutron [req-3eb88705-dcb9-4973-a9f8-83643b191f76 req-0d99f1cf-50cd-4d92-a7f1-7f00fd37e910 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Updated VIF entry in instance network info cache for port d3f41a80-52de-46a5-ac15-9a26e6710908. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1709.284274] env[63371]: DEBUG nova.network.neutron [req-3eb88705-dcb9-4973-a9f8-83643b191f76 req-0d99f1cf-50cd-4d92-a7f1-7f00fd37e910 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Updating instance_info_cache with network_info: [{"id": "d3f41a80-52de-46a5-ac15-9a26e6710908", "address": "fa:16:3e:f6:cd:6b", "network": {"id": "9c25e5e9-468d-4d4c-93e0-c9815eff1c2e", "bridge": null, "label": "tempest-ServersNegativeTestJSON-814005109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e693d73d70140c2ba065de2b60838c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd3f41a80-52", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1709.313125] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1709.313389] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1709.313594] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Deleting the datastore file [datastore1] 3da99cec-409f-4ea0-891c-2e9d7429674d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1709.313859] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-38f6740c-75fb-4d51-96e5-cb196f267211 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.323617] env[63371]: DEBUG oslo_vmware.api [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for the task: (returnval){ [ 1709.323617] env[63371]: value = "task-1774557" [ 1709.323617] env[63371]: _type = "Task" [ 1709.323617] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.331995] env[63371]: DEBUG oslo_concurrency.lockutils [None req-523bafcd-3515-4b0f-9f0b-d1cf2364b2d2 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.337745] env[63371]: DEBUG oslo_vmware.api [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774557, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.352874] env[63371]: DEBUG nova.compute.manager [req-c4bd54e9-3ba8-4433-b542-8142b9fb5661 req-bc5db965-673c-4888-835b-98ac8953fd30 service nova] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Received event network-changed-0fec120d-e875-4254-bf67-1c749227262a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1709.352874] env[63371]: DEBUG nova.compute.manager [req-c4bd54e9-3ba8-4433-b542-8142b9fb5661 req-bc5db965-673c-4888-835b-98ac8953fd30 service nova] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Refreshing instance network info cache due to event network-changed-0fec120d-e875-4254-bf67-1c749227262a. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1709.353032] env[63371]: DEBUG oslo_concurrency.lockutils [req-c4bd54e9-3ba8-4433-b542-8142b9fb5661 req-bc5db965-673c-4888-835b-98ac8953fd30 service nova] Acquiring lock "refresh_cache-935cf583-ecde-4a10-a773-6ff765e5bb49" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1709.353146] env[63371]: DEBUG oslo_concurrency.lockutils [req-c4bd54e9-3ba8-4433-b542-8142b9fb5661 req-bc5db965-673c-4888-835b-98ac8953fd30 service nova] Acquired lock "refresh_cache-935cf583-ecde-4a10-a773-6ff765e5bb49" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1709.353381] env[63371]: DEBUG nova.network.neutron [req-c4bd54e9-3ba8-4433-b542-8142b9fb5661 req-bc5db965-673c-4888-835b-98ac8953fd30 service nova] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Refreshing network info cache for port 0fec120d-e875-4254-bf67-1c749227262a {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1709.366435] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.366923] env[63371]: DEBUG nova.compute.manager [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1709.370893] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.427s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.372659] env[63371]: INFO nova.compute.claims [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1709.393358] env[63371]: DEBUG nova.compute.manager [None req-273d2afc-132a-4386-81da-73662975c3a2 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1709.394878] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5cb08c8-cc0e-4932-973a-39e10c1f3cf7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.398309] env[63371]: DEBUG oslo_concurrency.lockutils [None req-28ad438b-27d3-4b81-b0e7-dbcf20d53050 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.303s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.398949] env[63371]: DEBUG oslo_concurrency.lockutils [None req-523bafcd-3515-4b0f-9f0b-d1cf2364b2d2 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.068s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.399137] env[63371]: DEBUG nova.compute.manager [None req-523bafcd-3515-4b0f-9f0b-d1cf2364b2d2 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1709.400766] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529f227e-2c58-4da3-b0e4-73e95bb84b18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.415855] env[63371]: DEBUG nova.compute.manager [None req-523bafcd-3515-4b0f-9f0b-d1cf2364b2d2 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63371) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1709.415855] env[63371]: DEBUG nova.objects.instance [None 
req-523bafcd-3515-4b0f-9f0b-d1cf2364b2d2 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lazy-loading 'flavor' on Instance uuid 382a5997-90bb-4bbc-b595-23c8d2f2e1f0 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1709.461023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "9985dbcd-4498-4629-aae5-5e1933307c50" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.562344] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Acquiring lock "6f31d6ad-480d-40dd-924e-f6277d93c99a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.562860] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Lock "6f31d6ad-480d-40dd-924e-f6277d93c99a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.562860] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Acquiring lock "6f31d6ad-480d-40dd-924e-f6277d93c99a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1709.563036] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Lock "6f31d6ad-480d-40dd-924e-f6277d93c99a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.563193] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Lock "6f31d6ad-480d-40dd-924e-f6277d93c99a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.565304] env[63371]: INFO nova.compute.manager [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Terminating instance [ 1709.566938] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Acquiring lock 
"refresh_cache-6f31d6ad-480d-40dd-924e-f6277d93c99a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1709.567112] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Acquired lock "refresh_cache-6f31d6ad-480d-40dd-924e-f6277d93c99a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1709.567293] env[63371]: DEBUG nova.network.neutron [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1709.616121] env[63371]: DEBUG oslo_vmware.api [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774553, 'name': PowerOnVM_Task, 'duration_secs': 0.618732} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.616383] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1709.616582] env[63371]: INFO nova.compute.manager [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Took 9.57 seconds to spawn the instance on the hypervisor. [ 1709.616754] env[63371]: DEBUG nova.compute.manager [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1709.617543] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dda2a98-0b34-4fe5-9327-220abb1b7687 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.671054] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774555, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.698770] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1709.699057] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-38b24916-6c0c-4429-a9e0-d674a8055f7e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.708293] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1709.708293] env[63371]: value = "task-1774558" [ 1709.708293] env[63371]: _type = "Task" [ 1709.708293] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.718679] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774558, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.787330] env[63371]: DEBUG oslo_concurrency.lockutils [req-3eb88705-dcb9-4973-a9f8-83643b191f76 req-0d99f1cf-50cd-4d92-a7f1-7f00fd37e910 service nova] Releasing lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1709.836359] env[63371]: DEBUG oslo_vmware.api [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Task: {'id': task-1774557, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.232632} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.836631] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1709.836819] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1709.837275] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1709.837275] env[63371]: INFO nova.compute.manager [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1709.837447] env[63371]: DEBUG oslo.service.loopingcall [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1709.837589] env[63371]: DEBUG nova.compute.manager [-] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1709.837684] env[63371]: DEBUG nova.network.neutron [-] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1709.855311] env[63371]: DEBUG nova.network.neutron [-] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1709.872098] env[63371]: DEBUG nova.compute.utils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1709.873719] env[63371]: DEBUG nova.compute.manager [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1709.873895] env[63371]: DEBUG nova.network.neutron [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1709.911473] env[63371]: INFO nova.compute.manager [None req-273d2afc-132a-4386-81da-73662975c3a2 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] instance snapshotting [ 1709.912151] env[63371]: DEBUG nova.objects.instance [None req-273d2afc-132a-4386-81da-73662975c3a2 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Lazy-loading 'flavor' on Instance uuid 6f31d6ad-480d-40dd-924e-f6277d93c99a {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1709.919439] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-523bafcd-3515-4b0f-9f0b-d1cf2364b2d2 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1709.919693] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a6fbeb1-9a37-45bb-a5d8-76cb51755d5d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.931393] env[63371]: DEBUG oslo_vmware.api [None req-523bafcd-3515-4b0f-9f0b-d1cf2364b2d2 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1709.931393] env[63371]: value = "task-1774559" [ 1709.931393] env[63371]: _type = "Task" [ 1709.931393] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.933515] env[63371]: DEBUG nova.policy [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ef97c1a9a174c1888972e6f281eecbe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2601d597b4d64481ace490d56d1056a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1709.951927] env[63371]: DEBUG oslo_vmware.api [None req-523bafcd-3515-4b0f-9f0b-d1cf2364b2d2 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774559, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.087665] env[63371]: DEBUG nova.network.neutron [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1710.142242] env[63371]: DEBUG nova.network.neutron [req-c4bd54e9-3ba8-4433-b542-8142b9fb5661 req-bc5db965-673c-4888-835b-98ac8953fd30 service nova] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Updated VIF entry in instance network info cache for port 0fec120d-e875-4254-bf67-1c749227262a. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1710.142242] env[63371]: DEBUG nova.network.neutron [req-c4bd54e9-3ba8-4433-b542-8142b9fb5661 req-bc5db965-673c-4888-835b-98ac8953fd30 service nova] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Updating instance_info_cache with network_info: [{"id": "0fec120d-e875-4254-bf67-1c749227262a", "address": "fa:16:3e:1e:3d:e6", "network": {"id": "f8dd691e-b6b6-4644-83da-329c1cb84240", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1654783997-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a5b81b233f640b186d9798ff57a4945", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0fec120d-e8", "ovs_interfaceid": "0fec120d-e875-4254-bf67-1c749227262a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1710.142242] env[63371]: INFO nova.compute.manager [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Took 26.68 seconds to build instance. [ 1710.168154] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774555, 'name': CreateVM_Task, 'duration_secs': 0.649217} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.168505] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1710.169133] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1710.169329] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1710.169685] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1710.169941] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9610414e-a571-47f9-8efe-41a4762fe47d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.175525] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1710.175525] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52237595-5fd8-6ca1-66ee-129daeffc2e7" [ 1710.175525] env[63371]: _type = "Task" [ 1710.175525] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.184409] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52237595-5fd8-6ca1-66ee-129daeffc2e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.192981] env[63371]: DEBUG nova.network.neutron [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1710.219668] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774558, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.334043] env[63371]: DEBUG nova.network.neutron [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Successfully created port: 0169bee8-0cc2-4add-b53b-0dfecac574d2 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1710.358078] env[63371]: DEBUG nova.network.neutron [-] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1710.377784] env[63371]: DEBUG nova.compute.manager [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1710.418876] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da37da0-4665-4cd0-8dd1-3deb5508cb66 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.445474] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a88a10a-460d-4fbc-b78e-fa6f18df0397 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.459018] env[63371]: DEBUG oslo_vmware.api [None req-523bafcd-3515-4b0f-9f0b-d1cf2364b2d2 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774559, 'name': PowerOffVM_Task, 'duration_secs': 0.219492} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.461771] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-523bafcd-3515-4b0f-9f0b-d1cf2364b2d2 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1710.461957] env[63371]: DEBUG nova.compute.manager [None req-523bafcd-3515-4b0f-9f0b-d1cf2364b2d2 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1710.462900] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824fa0ff-ebc4-4396-bb15-b984e5847da8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.643313] env[63371]: DEBUG oslo_concurrency.lockutils [req-c4bd54e9-3ba8-4433-b542-8142b9fb5661 req-bc5db965-673c-4888-835b-98ac8953fd30 service nova] Releasing lock "refresh_cache-935cf583-ecde-4a10-a773-6ff765e5bb49" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1710.643859] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aac3440b-301b-417e-ad11-0de9c2d9c00f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "cf63c2a2-ee72-464e-944d-5e53ca8635ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.194s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.690575] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52237595-5fd8-6ca1-66ee-129daeffc2e7, 'name': SearchDatastore_Task, 'duration_secs': 0.010529} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.690984] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1710.691285] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1710.691552] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1710.691760] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1710.691974] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1710.692283] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-08297c89-a3ee-4c8c-bedd-6736b31f25fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.694981] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Releasing lock "refresh_cache-6f31d6ad-480d-40dd-924e-f6277d93c99a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1710.695453] env[63371]: DEBUG nova.compute.manager [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1710.695694] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1710.699192] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78f65ef-2ff0-4176-9c6b-3b0a09b3e266 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.705277] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1710.705277] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1710.706306] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fa346fb-b63c-416a-bf0a-8a4fca21f29d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.711401] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1710.715838] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09283eb7-c6ce-49a3-9802-506481287a1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.721929] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1710.721929] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52af3fd4-1fc9-0204-28cf-44a93370038b" [ 1710.721929] env[63371]: _type = "Task" [ 1710.721929] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.730361] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774558, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.732246] env[63371]: DEBUG oslo_vmware.api [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Waiting for the task: (returnval){ [ 1710.732246] env[63371]: value = "task-1774560" [ 1710.732246] env[63371]: _type = "Task" [ 1710.732246] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.743034] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52af3fd4-1fc9-0204-28cf-44a93370038b, 'name': SearchDatastore_Task, 'duration_secs': 0.012028} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.744422] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8518192f-a50a-48c0-a8d8-793e0a0d8548 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.754166] env[63371]: DEBUG oslo_vmware.api [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774560, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.761094] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1710.761094] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520e78b0-0484-6a04-c825-df0b4f826220" [ 1710.761094] env[63371]: _type = "Task" [ 1710.761094] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1710.774754] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520e78b0-0484-6a04-c825-df0b4f826220, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.813222] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-019b71a9-ecab-4c3c-abca-3e5c50bd8dd0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.822655] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff92058-e546-4e28-84ca-bff21745b2f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.859162] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1470dea8-166e-4d28-9df9-3d0ccfc4d1c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.862474] env[63371]: INFO nova.compute.manager [-] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Took 1.02 seconds to deallocate network for instance. 
[ 1710.873784] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0eff11-757e-4920-93db-e5a5720bdd0f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1710.896543] env[63371]: DEBUG nova.compute.provider_tree [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1710.963055] env[63371]: DEBUG nova.compute.manager [None req-273d2afc-132a-4386-81da-73662975c3a2 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Instance disappeared during snapshot {{(pid=63371) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4495}} [ 1710.974852] env[63371]: DEBUG oslo_concurrency.lockutils [None req-523bafcd-3515-4b0f-9f0b-d1cf2364b2d2 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.576s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.106028] env[63371]: DEBUG nova.compute.manager [None req-273d2afc-132a-4386-81da-73662975c3a2 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Found 0 images (rotation: 2) {{(pid=63371) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1711.222913] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774558, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.245330] env[63371]: DEBUG oslo_vmware.api [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774560, 'name': PowerOffVM_Task, 'duration_secs': 0.358043} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.245895] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1711.245895] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1711.246477] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ef373a1-d90f-4511-93fb-37f6dc51a45b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.272024] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520e78b0-0484-6a04-c825-df0b4f826220, 'name': SearchDatastore_Task, 'duration_secs': 0.01343} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.272353] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1711.272635] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 935cf583-ecde-4a10-a773-6ff765e5bb49/935cf583-ecde-4a10-a773-6ff765e5bb49.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1711.273342] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8718785e-9a42-4c84-a1d0-46b3e8ad0749 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.279352] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1711.279521] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1711.279712] env[63371]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Deleting the datastore file [datastore1] 6f31d6ad-480d-40dd-924e-f6277d93c99a {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1711.279986] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15ca95e0-e506-4909-9a7b-5e7242d3905a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.283084] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1711.283084] env[63371]: value = "task-1774562" [ 1711.283084] env[63371]: _type = "Task" [ 1711.283084] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.288469] env[63371]: DEBUG oslo_vmware.api [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Waiting for the task: (returnval){ [ 1711.288469] env[63371]: value = "task-1774563" [ 1711.288469] env[63371]: _type = "Task" [ 1711.288469] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.299093] env[63371]: DEBUG oslo_vmware.api [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774563, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.379161] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.397567] env[63371]: DEBUG nova.compute.manager [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1711.400912] env[63371]: DEBUG nova.scheduler.client.report [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1711.424667] env[63371]: DEBUG nova.virt.hardware [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1711.424667] env[63371]: DEBUG nova.virt.hardware [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1711.424667] env[63371]: DEBUG nova.virt.hardware [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1711.424667] env[63371]: DEBUG nova.virt.hardware [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1711.424667] env[63371]: DEBUG nova.virt.hardware [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1711.424994] env[63371]: DEBUG nova.virt.hardware [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1711.424994] 
env[63371]: DEBUG nova.virt.hardware [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1711.425210] env[63371]: DEBUG nova.virt.hardware [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1711.425210] env[63371]: DEBUG nova.virt.hardware [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1711.425875] env[63371]: DEBUG nova.virt.hardware [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1711.425875] env[63371]: DEBUG nova.virt.hardware [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1711.426994] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6fe6c94-da10-43cb-891e-490b523bf3d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.434926] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd3e01c-0295-4d23-b74a-86a5adcaf918 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.690172] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "485a2d6a-1b58-470d-9dc5-8cf31b6726ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.690545] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "485a2d6a-1b58-470d-9dc5-8cf31b6726ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.725395] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774558, 'name': CloneVM_Task, 'duration_secs': 1.644268} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.725642] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Created linked-clone VM from snapshot [ 1711.726463] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f380610-7c70-4202-8049-fc9ba4bcfbe1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.737019] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Uploading image 9e175974-83af-4327-8123-1c04de287277 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1711.767298] env[63371]: DEBUG oslo_vmware.rw_handles [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1711.767298] env[63371]: value = "vm-368444" [ 1711.767298] env[63371]: _type = "VirtualMachine" [ 1711.767298] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1711.767655] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-58f5f5a8-0de3-42bb-a702-1c0ac68158ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.778949] env[63371]: DEBUG oslo_vmware.rw_handles [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lease: (returnval){ [ 1711.778949] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ab4d9b-c280-eb95-9744-c51239333b92" [ 1711.778949] env[63371]: _type = "HttpNfcLease" [ 1711.778949] env[63371]: } obtained for exporting VM: (result){ [ 1711.778949] env[63371]: value = "vm-368444" [ 1711.778949] env[63371]: _type = "VirtualMachine" [ 1711.778949] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1711.779353] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the lease: (returnval){ [ 1711.779353] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ab4d9b-c280-eb95-9744-c51239333b92" [ 1711.779353] env[63371]: _type = "HttpNfcLease" [ 1711.779353] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1711.801421] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1711.801421] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ab4d9b-c280-eb95-9744-c51239333b92" [ 1711.801421] env[63371]: _type = "HttpNfcLease" [ 1711.801421] env[63371]: } is ready. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1711.801839] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774562, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.806320] env[63371]: DEBUG oslo_vmware.rw_handles [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1711.806320] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ab4d9b-c280-eb95-9744-c51239333b92" [ 1711.806320] env[63371]: _type = "HttpNfcLease" [ 1711.806320] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1711.806320] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b72956-7123-4cac-956b-9943924b2420 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.810969] env[63371]: DEBUG oslo_vmware.api [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Task: {'id': task-1774563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138776} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1711.811828] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1711.812069] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1711.812310] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1711.812523] env[63371]: INFO nova.compute.manager [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1711.812832] env[63371]: DEBUG oslo.service.loopingcall [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1711.813193] env[63371]: DEBUG nova.compute.manager [-] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1711.813271] env[63371]: DEBUG nova.network.neutron [-] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1711.820269] env[63371]: DEBUG oslo_vmware.rw_handles [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5266a018-103e-787e-cd5b-e6796c59fff8/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1711.820511] env[63371]: DEBUG oslo_vmware.rw_handles [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5266a018-103e-787e-cd5b-e6796c59fff8/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1711.882090] env[63371]: DEBUG nova.network.neutron [-] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1711.894238] env[63371]: DEBUG nova.compute.manager [req-cc3e6c5e-3d8d-4307-8755-15d3f5719559 req-7f7de187-13e1-4f27-a1b1-e737f190b860 service nova] [instance: 158259a4-f54a-4192-b235-f03838193516] Received event network-vif-plugged-0169bee8-0cc2-4add-b53b-0dfecac574d2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1711.894474] env[63371]: DEBUG oslo_concurrency.lockutils [req-cc3e6c5e-3d8d-4307-8755-15d3f5719559 req-7f7de187-13e1-4f27-a1b1-e737f190b860 service nova] Acquiring lock "158259a4-f54a-4192-b235-f03838193516-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.894923] env[63371]: DEBUG oslo_concurrency.lockutils [req-cc3e6c5e-3d8d-4307-8755-15d3f5719559 req-7f7de187-13e1-4f27-a1b1-e737f190b860 service nova] Lock "158259a4-f54a-4192-b235-f03838193516-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.894923] env[63371]: DEBUG oslo_concurrency.lockutils [req-cc3e6c5e-3d8d-4307-8755-15d3f5719559 req-7f7de187-13e1-4f27-a1b1-e737f190b860 service nova] Lock "158259a4-f54a-4192-b235-f03838193516-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.895180] env[63371]: DEBUG nova.compute.manager [req-cc3e6c5e-3d8d-4307-8755-15d3f5719559 req-7f7de187-13e1-4f27-a1b1-e737f190b860 service nova] [instance: 158259a4-f54a-4192-b235-f03838193516] No waiting events found dispatching network-vif-plugged-0169bee8-0cc2-4add-b53b-0dfecac574d2 {{(pid=63371) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1711.895894] env[63371]: WARNING nova.compute.manager [req-cc3e6c5e-3d8d-4307-8755-15d3f5719559 req-7f7de187-13e1-4f27-a1b1-e737f190b860 service nova] [instance: 158259a4-f54a-4192-b235-f03838193516] Received unexpected event network-vif-plugged-0169bee8-0cc2-4add-b53b-0dfecac574d2 for instance with vm_state building and task_state spawning. [ 1711.906270] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.535s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.906752] env[63371]: DEBUG nova.compute.manager [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1711.909706] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.623s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.909901] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.911908] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.366s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.912144] env[63371]: DEBUG nova.objects.instance [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lazy-loading 'resources' on Instance uuid 195de525-1081-4db6-acf3-04a6d3eb142f {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1711.921024] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-74b9532a-c331-4158-bd69-eafedc884bc8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.941269] env[63371]: INFO nova.scheduler.client.report [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted allocations for instance e1bc4623-f6b5-4440-a58d-594e9cbe3628 [ 1711.970448] env[63371]: DEBUG oslo_concurrency.lockutils [None 
req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.970448] env[63371]: DEBUG oslo_concurrency.lockutils [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.970448] env[63371]: DEBUG oslo_concurrency.lockutils [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1711.970448] env[63371]: DEBUG oslo_concurrency.lockutils [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1711.970448] env[63371]: DEBUG oslo_concurrency.lockutils [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1711.972171] env[63371]: INFO nova.compute.manager [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Terminating instance [ 1711.974134] env[63371]: DEBUG nova.compute.manager [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1711.978046] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1711.978046] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34296337-71b4-494e-9c1a-d07ed1927355 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.985031] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1711.985031] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7157ccac-6f27-43d3-9de3-99123e34c0bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.005801] env[63371]: DEBUG nova.network.neutron [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Successfully updated port: 0169bee8-0cc2-4add-b53b-0dfecac574d2 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1712.192821] env[63371]: DEBUG nova.compute.manager [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1712.264480] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1712.264938] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1712.264938] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleting the datastore file [datastore1] 382a5997-90bb-4bbc-b595-23c8d2f2e1f0 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1712.264938] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5e878609-6e51-4a2d-9e17-6d47767ab693 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.273974] env[63371]: DEBUG oslo_vmware.api [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1712.273974] env[63371]: value = "task-1774566" [ 1712.273974] env[63371]: _type = "Task" [ 1712.273974] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.284698] env[63371]: DEBUG oslo_vmware.api [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774566, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.293958] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774562, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.574042} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.294506] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 935cf583-ecde-4a10-a773-6ff765e5bb49/935cf583-ecde-4a10-a773-6ff765e5bb49.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1712.296077] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1712.296077] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b74ff22b-9088-4d43-9dbe-098db6ab4c87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.304098] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1712.304098] env[63371]: value = "task-1774567" [ 1712.304098] env[63371]: _type = "Task" [ 1712.304098] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.314520] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774567, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.386011] env[63371]: DEBUG nova.network.neutron [-] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1712.423071] env[63371]: DEBUG nova.compute.utils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1712.425711] env[63371]: DEBUG nova.compute.manager [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1712.429167] env[63371]: DEBUG nova.network.neutron [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1712.453753] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50752e48-6250-4193-911c-aacdc0d6825e tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "e1bc4623-f6b5-4440-a58d-594e9cbe3628" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.165s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.499366] env[63371]: DEBUG nova.policy [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31b76ca90f31495287b332ebb3001dff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e96348bcfea1455dad72945c7c36f027', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1712.507810] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "refresh_cache-158259a4-f54a-4192-b235-f03838193516" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1712.508097] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "refresh_cache-158259a4-f54a-4192-b235-f03838193516" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1712.508264] env[63371]: DEBUG nova.network.neutron [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1712.713961] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.794065] env[63371]: DEBUG oslo_vmware.api [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774566, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19318} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.794667] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1712.795208] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1712.795420] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1712.795924] env[63371]: INFO nova.compute.manager [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Took 0.82 seconds to destroy the instance on the hypervisor. [ 1712.796163] env[63371]: DEBUG oslo.service.loopingcall [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1712.799605] env[63371]: DEBUG nova.compute.manager [-] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1712.799712] env[63371]: DEBUG nova.network.neutron [-] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1712.815978] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774567, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093705} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.817225] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1712.820027] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a99428c-12c8-4048-afc2-1d3630be602d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.821166] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9dc941-ba86-40b3-b3a3-e644e14c6a31 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.841091] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baffd347-794f-4ca1-86b4-76de3c3a105a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.857269] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 935cf583-ecde-4a10-a773-6ff765e5bb49/935cf583-ecde-4a10-a773-6ff765e5bb49.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1712.861133] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-289833be-3570-490c-8006-53bf545324f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.880042] env[63371]: DEBUG nova.network.neutron [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Successfully created port: e144cd6b-c3f5-496e-99c6-19e9ab58c042 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1712.918804] env[63371]: INFO nova.compute.manager [-] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Took 1.11 seconds to deallocate network for instance. [ 1712.922905] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b81d9791-ef68-4dd9-ab73-4652ed5f67ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.926060] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1712.926060] env[63371]: value = "task-1774568" [ 1712.926060] env[63371]: _type = "Task" [ 1712.926060] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.931026] env[63371]: DEBUG nova.compute.manager [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1712.940534] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2824ac-7c34-4098-b93d-56a32921a285 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.949593] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774568, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.961394] env[63371]: DEBUG nova.compute.provider_tree [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1713.063958] env[63371]: DEBUG nova.network.neutron [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1713.255989] env[63371]: DEBUG nova.network.neutron [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Updating instance_info_cache with network_info: [{"id": "0169bee8-0cc2-4add-b53b-0dfecac574d2", "address": "fa:16:3e:40:6f:e8", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0169bee8-0c", "ovs_interfaceid": "0169bee8-0cc2-4add-b53b-0dfecac574d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1713.438240] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.452772] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774568, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.487677] env[63371]: ERROR nova.scheduler.client.report [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] [req-9967bedf-491e-4088-afe2-a909f93544ad] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9967bedf-491e-4088-afe2-a909f93544ad"}]} [ 1713.513662] env[63371]: DEBUG nova.scheduler.client.report [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1713.538351] env[63371]: DEBUG nova.scheduler.client.report [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1713.538351] env[63371]: DEBUG nova.compute.provider_tree [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1713.550865] env[63371]: DEBUG nova.scheduler.client.report [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1713.575397] env[63371]: DEBUG nova.scheduler.client.report [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1713.760523] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "refresh_cache-158259a4-f54a-4192-b235-f03838193516" {{(pid=63371) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1713.760955] env[63371]: DEBUG nova.compute.manager [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Instance network_info: |[{"id": "0169bee8-0cc2-4add-b53b-0dfecac574d2", "address": "fa:16:3e:40:6f:e8", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0169bee8-0c", "ovs_interfaceid": "0169bee8-0cc2-4add-b53b-0dfecac574d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1713.763900] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:6f:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6fb0104-186b-4288-b87e-634893f46f01', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0169bee8-0cc2-4add-b53b-0dfecac574d2', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1713.771656] env[63371]: DEBUG oslo.service.loopingcall [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1713.772060] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 158259a4-f54a-4192-b235-f03838193516] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1713.772338] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c4354f8-bb95-4776-82fc-3afb55c021f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.796624] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1713.796624] env[63371]: value = "task-1774569" [ 1713.796624] env[63371]: _type = "Task" [ 1713.796624] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.808571] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774569, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.931239] env[63371]: DEBUG nova.compute.manager [req-01aed27b-7561-4934-888c-1ab78c0103a8 req-54e043f8-7719-462e-be2d-51421a0a2886 service nova] [instance: 158259a4-f54a-4192-b235-f03838193516] Received event network-changed-0169bee8-0cc2-4add-b53b-0dfecac574d2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1713.931532] env[63371]: DEBUG nova.compute.manager [req-01aed27b-7561-4934-888c-1ab78c0103a8 req-54e043f8-7719-462e-be2d-51421a0a2886 service nova] [instance: 158259a4-f54a-4192-b235-f03838193516] Refreshing instance network info cache due to event network-changed-0169bee8-0cc2-4add-b53b-0dfecac574d2. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1713.931884] env[63371]: DEBUG oslo_concurrency.lockutils [req-01aed27b-7561-4934-888c-1ab78c0103a8 req-54e043f8-7719-462e-be2d-51421a0a2886 service nova] Acquiring lock "refresh_cache-158259a4-f54a-4192-b235-f03838193516" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1713.932095] env[63371]: DEBUG oslo_concurrency.lockutils [req-01aed27b-7561-4934-888c-1ab78c0103a8 req-54e043f8-7719-462e-be2d-51421a0a2886 service nova] Acquired lock "refresh_cache-158259a4-f54a-4192-b235-f03838193516" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1713.932380] env[63371]: DEBUG nova.network.neutron [req-01aed27b-7561-4934-888c-1ab78c0103a8 req-54e043f8-7719-462e-be2d-51421a0a2886 service nova] [instance: 158259a4-f54a-4192-b235-f03838193516] Refreshing network info cache for port 0169bee8-0cc2-4add-b53b-0dfecac574d2 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1713.942175] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc4bccf-3ecc-4532-9b65-49f0eea8c3d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.955991] env[63371]: DEBUG nova.compute.manager [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1713.958285] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774568, 'name': ReconfigVM_Task, 'duration_secs': 0.627401} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.959389] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 935cf583-ecde-4a10-a773-6ff765e5bb49/935cf583-ecde-4a10-a773-6ff765e5bb49.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1713.960997] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a0c8042-5c22-4285-b68d-fcb600d67c61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.964479] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3bde1e06-cd17-4fa2-9a08-a80fef4653dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.002205] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b48613-0fda-4290-83cb-e0a3f894ee4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.005592] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1714.005592] env[63371]: value = "task-1774570" [ 1714.005592] env[63371]: _type = "Task" [ 1714.005592] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.015481] env[63371]: DEBUG nova.virt.hardware [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1714.015856] env[63371]: DEBUG nova.virt.hardware [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1714.016159] env[63371]: DEBUG nova.virt.hardware [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 1714.016353] env[63371]: DEBUG nova.virt.hardware [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1714.016558] env[63371]: DEBUG nova.virt.hardware [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1714.016740] env[63371]: DEBUG nova.virt.hardware [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1714.016987] env[63371]: DEBUG nova.virt.hardware [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1714.017184] env[63371]: DEBUG nova.virt.hardware [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1714.017378] env[63371]: DEBUG nova.virt.hardware [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1714.017552] env[63371]: DEBUG nova.virt.hardware [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1714.017751] env[63371]: DEBUG nova.virt.hardware [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1714.019659] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38a4142b-429f-4a44-b2fe-791c786589eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.024573] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50af8050-c76b-4f4d-b891-8d9493714cfe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.031502] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': 
task-1774570, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.046586] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66290c80-798e-495c-811b-4af4d4bf8696 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.051037] env[63371]: DEBUG nova.network.neutron [-] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.052644] env[63371]: DEBUG nova.compute.provider_tree [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1714.309729] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774569, 'name': CreateVM_Task, 'duration_secs': 0.481785} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.309942] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 158259a4-f54a-4192-b235-f03838193516] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1714.310848] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1714.311074] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1714.311409] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1714.311733] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbdddd72-c9ee-408d-b337-830a6d8db9b6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.318118] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a 
tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1714.318118] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52193cf1-6fec-507f-7ad9-1eb9f8423b1f" [ 1714.318118] env[63371]: _type = "Task" [ 1714.318118] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.328389] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52193cf1-6fec-507f-7ad9-1eb9f8423b1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.516678] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774570, 'name': Rename_Task, 'duration_secs': 0.237111} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.517028] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1714.517320] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1f0c461-12aa-4815-9b02-83cbafe0f666 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.526532] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1714.526532] env[63371]: value = "task-1774571" [ 1714.526532] env[63371]: _type = "Task" [ 1714.526532] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.538580] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774571, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.556963] env[63371]: INFO nova.compute.manager [-] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Took 1.76 seconds to deallocate network for instance. 
[ 1714.592852] env[63371]: DEBUG nova.scheduler.client.report [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 128 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1714.593166] env[63371]: DEBUG nova.compute.provider_tree [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 128 to 129 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1714.593400] env[63371]: DEBUG nova.compute.provider_tree [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1714.790555] env[63371]: DEBUG nova.network.neutron [req-01aed27b-7561-4934-888c-1ab78c0103a8 req-54e043f8-7719-462e-be2d-51421a0a2886 service nova] [instance: 158259a4-f54a-4192-b235-f03838193516] Updated VIF entry in instance network info cache for port 0169bee8-0cc2-4add-b53b-0dfecac574d2. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1714.790972] env[63371]: DEBUG nova.network.neutron [req-01aed27b-7561-4934-888c-1ab78c0103a8 req-54e043f8-7719-462e-be2d-51421a0a2886 service nova] [instance: 158259a4-f54a-4192-b235-f03838193516] Updating instance_info_cache with network_info: [{"id": "0169bee8-0cc2-4add-b53b-0dfecac574d2", "address": "fa:16:3e:40:6f:e8", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0169bee8-0c", "ovs_interfaceid": "0169bee8-0cc2-4add-b53b-0dfecac574d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.833486] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52193cf1-6fec-507f-7ad9-1eb9f8423b1f, 'name': SearchDatastore_Task, 'duration_secs': 0.011419} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.833868] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1714.834173] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1714.834451] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1714.834645] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1714.835093] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1714.835165] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87dbc84b-95a7-444a-9152-9b82ea4eca8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.845715] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1714.845923] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1714.846792] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b62fa92a-cece-4cd6-8885-80391b17194a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.854423] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1714.854423] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525b7fa2-5615-731a-1976-3b3ecb0cc6a0" [ 1714.854423] env[63371]: _type = "Task" [ 1714.854423] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.863451] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525b7fa2-5615-731a-1976-3b3ecb0cc6a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.025249] env[63371]: DEBUG nova.network.neutron [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Successfully updated port: e144cd6b-c3f5-496e-99c6-19e9ab58c042 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1715.038636] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774571, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.065727] env[63371]: DEBUG oslo_concurrency.lockutils [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.101541] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.189s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.104223] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.806s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.105976] env[63371]: INFO nova.compute.claims [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1715.124884] env[63371]: INFO nova.scheduler.client.report [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Deleted allocations for instance 195de525-1081-4db6-acf3-04a6d3eb142f [ 1715.294764] env[63371]: DEBUG oslo_concurrency.lockutils [req-01aed27b-7561-4934-888c-1ab78c0103a8 req-54e043f8-7719-462e-be2d-51421a0a2886 service nova] Releasing lock "refresh_cache-158259a4-f54a-4192-b235-f03838193516" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.295015] env[63371]: DEBUG nova.compute.manager [req-01aed27b-7561-4934-888c-1ab78c0103a8 req-54e043f8-7719-462e-be2d-51421a0a2886 service nova] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Received event network-vif-deleted-421d7cf6-e899-4181-9f5b-07a12ec96caf {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1715.295285] env[63371]: INFO nova.compute.manager [req-01aed27b-7561-4934-888c-1ab78c0103a8 req-54e043f8-7719-462e-be2d-51421a0a2886 service nova] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Neutron deleted interface 421d7cf6-e899-4181-9f5b-07a12ec96caf; detaching it from the instance and deleting it from the info cache [ 1715.295501] env[63371]: DEBUG nova.network.neutron [req-01aed27b-7561-4934-888c-1ab78c0103a8 req-54e043f8-7719-462e-be2d-51421a0a2886 service nova] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1715.365101] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 
tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525b7fa2-5615-731a-1976-3b3ecb0cc6a0, 'name': SearchDatastore_Task, 'duration_secs': 0.010686} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.365903] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c75e3d5d-95ab-47f5-915f-8bcfe54b400f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.371430] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1715.371430] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f698a1-6818-d76d-a910-24ab628aa8ae" [ 1715.371430] env[63371]: _type = "Task" [ 1715.371430] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.379298] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f698a1-6818-d76d-a910-24ab628aa8ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.531813] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1715.531962] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1715.532121] env[63371]: DEBUG nova.network.neutron [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1715.539233] env[63371]: DEBUG oslo_vmware.api [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774571, 'name': PowerOnVM_Task, 'duration_secs': 0.615992} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.539736] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1715.539933] env[63371]: INFO nova.compute.manager [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Took 8.56 seconds to spawn the instance on the hypervisor. [ 1715.540120] env[63371]: DEBUG nova.compute.manager [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1715.540889] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-603082f0-b210-4e69-90a7-11d28e9225d5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.634264] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76e0cce0-e8a1-4937-ae20-3cbd856a433e tempest-ServersWithSpecificFlavorTestJSON-81130433 tempest-ServersWithSpecificFlavorTestJSON-81130433-project-member] Lock "195de525-1081-4db6-acf3-04a6d3eb142f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.520s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.798323] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e7e0103e-67a5-454c-8e4f-7fa0b4f1191c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.809153] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1c0d17f-c788-4f40-bb71-7c3d6b673de6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.846511] env[63371]: DEBUG nova.compute.manager [req-01aed27b-7561-4934-888c-1ab78c0103a8 req-54e043f8-7719-462e-be2d-51421a0a2886 service nova] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Detach interface failed, port_id=421d7cf6-e899-4181-9f5b-07a12ec96caf, reason: Instance 382a5997-90bb-4bbc-b595-23c8d2f2e1f0 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1715.882439] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f698a1-6818-d76d-a910-24ab628aa8ae, 'name': SearchDatastore_Task, 'duration_secs': 0.014085} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.883266] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.883538] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 158259a4-f54a-4192-b235-f03838193516/158259a4-f54a-4192-b235-f03838193516.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1715.883806] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f37f6856-9ca8-40e3-8291-bd368e2061cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.893051] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1715.893051] env[63371]: value = "task-1774572" [ 1715.893051] env[63371]: _type = "Task" [ 1715.893051] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.902407] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774572, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.063853] env[63371]: INFO nova.compute.manager [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Took 26.94 seconds to build instance. [ 1716.091872] env[63371]: DEBUG nova.network.neutron [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1716.123437] env[63371]: DEBUG nova.compute.manager [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Received event network-vif-plugged-e144cd6b-c3f5-496e-99c6-19e9ab58c042 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1716.123955] env[63371]: DEBUG oslo_concurrency.lockutils [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] Acquiring lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.124318] env[63371]: DEBUG oslo_concurrency.lockutils [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] Lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.124633] env[63371]: DEBUG oslo_concurrency.lockutils [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] Lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.124917] env[63371]: DEBUG nova.compute.manager [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] No waiting events found dispatching network-vif-plugged-e144cd6b-c3f5-496e-99c6-19e9ab58c042 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1716.125231] env[63371]: WARNING nova.compute.manager [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Received unexpected event network-vif-plugged-e144cd6b-c3f5-496e-99c6-19e9ab58c042 for instance with vm_state building and task_state spawning. [ 1716.125525] env[63371]: DEBUG nova.compute.manager [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Received event network-changed-e144cd6b-c3f5-496e-99c6-19e9ab58c042 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1716.125793] env[63371]: DEBUG nova.compute.manager [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Refreshing instance network info cache due to event network-changed-e144cd6b-c3f5-496e-99c6-19e9ab58c042. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1716.126082] env[63371]: DEBUG oslo_concurrency.lockutils [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] Acquiring lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.266179] env[63371]: DEBUG nova.network.neutron [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating instance_info_cache with network_info: [{"id": "e144cd6b-c3f5-496e-99c6-19e9ab58c042", "address": "fa:16:3e:99:d0:57", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape144cd6b-c3", "ovs_interfaceid": "e144cd6b-c3f5-496e-99c6-19e9ab58c042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.410433] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774572, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.495146] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8c530b-5190-4947-b275-27aa386e4859 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.504597] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca54700-295a-44e6-97c9-e8718dc4e68f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.539581] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3e9ee6-95c8-4a9e-9bb8-0f013f77dd27 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.548889] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f959bc2-7cb7-475b-9a85-8267e65b1f77 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.564930] env[63371]: DEBUG nova.compute.provider_tree [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1716.568480] env[63371]: DEBUG oslo_concurrency.lockutils [None req-59bcac99-cc3b-4ef0-9987-c6b4a865c932 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "935cf583-ecde-4a10-a773-6ff765e5bb49" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.448s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.770356] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1716.770356] env[63371]: DEBUG nova.compute.manager [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Instance network_info: |[{"id": "e144cd6b-c3f5-496e-99c6-19e9ab58c042", "address": "fa:16:3e:99:d0:57", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": 
"nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape144cd6b-c3", "ovs_interfaceid": "e144cd6b-c3f5-496e-99c6-19e9ab58c042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1716.770356] env[63371]: DEBUG oslo_concurrency.lockutils [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] Acquired lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.770562] env[63371]: DEBUG nova.network.neutron [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Refreshing network info cache for port e144cd6b-c3f5-496e-99c6-19e9ab58c042 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1716.771813] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:99:d0:57', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e144cd6b-c3f5-496e-99c6-19e9ab58c042', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1716.782384] env[63371]: DEBUG oslo.service.loopingcall [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.786791] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1716.787133] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67e28286-f0b8-4a65-8a65-9ace1d4ed42e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.820430] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1716.820430] env[63371]: value = "task-1774573" [ 1716.820430] env[63371]: _type = "Task" [ 1716.820430] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.830922] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774573, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.905479] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774572, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.56582} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.906149] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 158259a4-f54a-4192-b235-f03838193516/158259a4-f54a-4192-b235-f03838193516.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1716.906149] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1716.906347] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-609e4235-890f-40cc-95d1-2df0c4cd0f4b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.915536] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1716.915536] env[63371]: value = "task-1774574" [ 1716.915536] env[63371]: _type = "Task" [ 1716.915536] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.927293] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774574, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.071502] env[63371]: DEBUG nova.scheduler.client.report [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1717.345587] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774573, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.426984] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774574, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085818} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.428309] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1717.429433] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d626b3-391a-4e63-bf1c-83c14d886e55 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.456357] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 158259a4-f54a-4192-b235-f03838193516/158259a4-f54a-4192-b235-f03838193516.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1717.460097] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b21ee8ba-0a38-4c35-b293-2595c4893836 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.483989] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1717.483989] env[63371]: value = "task-1774575" [ 1717.483989] env[63371]: _type = "Task" [ 1717.483989] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.492772] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774575, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.576309] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.472s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1717.576847] env[63371]: DEBUG nova.compute.manager [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1717.581162] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.862s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1717.582742] env[63371]: INFO nova.compute.claims [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1717.626455] env[63371]: DEBUG nova.compute.manager [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1717.627455] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd754d2e-8396-4b1f-b7fe-e373920e4afe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.742509] env[63371]: DEBUG nova.network.neutron [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updated VIF entry in instance network info cache for port e144cd6b-c3f5-496e-99c6-19e9ab58c042. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1717.742509] env[63371]: DEBUG nova.network.neutron [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating instance_info_cache with network_info: [{"id": "e144cd6b-c3f5-496e-99c6-19e9ab58c042", "address": "fa:16:3e:99:d0:57", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape144cd6b-c3", "ovs_interfaceid": "e144cd6b-c3f5-496e-99c6-19e9ab58c042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.832052] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774573, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.834848] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "b523486c-adae-4322-80be-1f3bf33ca192" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1717.835464] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1717.994366] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774575, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.094495] env[63371]: DEBUG nova.compute.utils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1718.096685] env[63371]: DEBUG nova.compute.manager [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1718.097199] env[63371]: DEBUG nova.network.neutron [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1718.141629] env[63371]: INFO nova.compute.manager [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] instance snapshotting [ 1718.144658] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9090bc01-c05a-4c4f-9e78-6b9538aa9c45 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.177103] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95e04d9f-634c-48fa-9396-a8790e924dcf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.204785] env[63371]: DEBUG nova.policy [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09ca20017c6245f9bae55080e98de838', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdcb553167e84358b2f89a0eb9fe09ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1718.245233] env[63371]: DEBUG oslo_concurrency.lockutils [req-f0a37377-ed20-4216-b641-566e78a1759b req-a6bd052b-2f15-4111-9ef8-1e295ff017f1 service nova] Releasing lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.333966] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774573, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.339021] env[63371]: INFO nova.compute.manager [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Detaching volume 16dfef5a-9dd9-48d8-b733-f65d801d0391 [ 1718.384450] env[63371]: INFO nova.virt.block_device [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Attempting to driver detach volume 16dfef5a-9dd9-48d8-b733-f65d801d0391 from mountpoint /dev/sdb [ 1718.384761] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Volume detach. 
Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1718.384971] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368419', 'volume_id': '16dfef5a-9dd9-48d8-b733-f65d801d0391', 'name': 'volume-16dfef5a-9dd9-48d8-b733-f65d801d0391', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b523486c-adae-4322-80be-1f3bf33ca192', 'attached_at': '', 'detached_at': '', 'volume_id': '16dfef5a-9dd9-48d8-b733-f65d801d0391', 'serial': '16dfef5a-9dd9-48d8-b733-f65d801d0391'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1718.386245] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a4b805d-dc1b-4c3a-95fa-d06276d6032f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.412459] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e63fde6-97f4-4da1-bc64-c4ed76b86f1c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.423043] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f91436-195d-48c5-b57c-cf7d20f992ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.450879] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c472014f-feb6-4364-818f-7421bf9798f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.469790] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] The volume has not been displaced from its original location: [datastore1] volume-16dfef5a-9dd9-48d8-b733-f65d801d0391/volume-16dfef5a-9dd9-48d8-b733-f65d801d0391.vmdk. No consolidation needed. {{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1718.475960] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Reconfiguring VM instance instance-0000002f to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1718.476364] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1805d961-e3cf-4787-b521-2b5c7d0b3915 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.501740] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774575, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.503372] env[63371]: DEBUG oslo_vmware.api [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1718.503372] env[63371]: value = "task-1774576" [ 1718.503372] env[63371]: _type = "Task" [ 1718.503372] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.514221] env[63371]: DEBUG oslo_vmware.api [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774576, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.598044] env[63371]: DEBUG nova.compute.manager [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1718.691628] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1718.691943] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1ed3008f-0f77-4945-8807-aaa6b4e6b68b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.703186] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1718.703186] env[63371]: value = "task-1774577" [ 1718.703186] env[63371]: _type = "Task" [ 1718.703186] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.713460] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774577, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.794576] env[63371]: DEBUG nova.network.neutron [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Successfully created port: 5d6f97e2-eb9c-468d-8931-77a4c10ff125 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1718.836558] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774573, 'name': CreateVM_Task, 'duration_secs': 1.641856} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.836801] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1718.838185] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.838446] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.838800] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1718.839344] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18bc4354-679d-42f1-8a46-d34f91b8da98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.846528] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1718.846528] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52aae5c9-84df-3578-4d90-223d4df69273" [ 1718.846528] env[63371]: _type = "Task" [ 1718.846528] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.856235] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52aae5c9-84df-3578-4d90-223d4df69273, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.005509] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774575, 'name': ReconfigVM_Task, 'duration_secs': 1.116962} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.007889] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 158259a4-f54a-4192-b235-f03838193516/158259a4-f54a-4192-b235-f03838193516.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1719.011176] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-94842bba-0355-4368-a528-f0eff5b96db4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.019381] env[63371]: DEBUG oslo_vmware.api [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774576, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.020966] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1719.020966] env[63371]: value = "task-1774578" [ 1719.020966] env[63371]: _type = "Task" [ 1719.020966] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.030834] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774578, 'name': Rename_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.049762] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d531855-4aa1-438e-9a8a-6cf6393f7939 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.058522] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7435222f-8d45-4300-8a73-7ec499c76b0f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.096079] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c73e58-7848-476a-b08f-b862892713c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.106040] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6dc02da-22da-4c55-8a49-5f7042b6de43 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.125787] env[63371]: DEBUG nova.compute.provider_tree [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1719.213845] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774577, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.358967] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52aae5c9-84df-3578-4d90-223d4df69273, 'name': SearchDatastore_Task, 'duration_secs': 0.198644} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.359367] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.359823] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1719.359976] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.360196] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.360410] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1719.360707] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-602a3eea-67ef-4a9d-9363-798ac5194606 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.371282] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1719.371481] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1719.372524] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21312361-6f61-430b-ae33-1aa6f0fe82d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.379624] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1719.379624] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5290b7e8-64ba-8336-7422-ff7f7425dc5d" [ 1719.379624] env[63371]: _type = "Task" [ 1719.379624] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.388851] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5290b7e8-64ba-8336-7422-ff7f7425dc5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.521815] env[63371]: DEBUG oslo_vmware.api [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774576, 'name': ReconfigVM_Task, 'duration_secs': 0.807648} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.522302] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Reconfigured VM instance instance-0000002f to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1719.531073] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1e31edf5-0d02-4d2a-9707-a9d5daa4f836 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.555307] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774578, 'name': Rename_Task, 'duration_secs': 0.254856} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.555307] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1719.555497] env[63371]: DEBUG oslo_vmware.api [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1719.555497] env[63371]: value = "task-1774579" [ 1719.555497] env[63371]: _type = "Task" [ 1719.555497] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.555648] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d8daf03-bf16-4d2d-ab54-2c71ab818a37 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.570546] env[63371]: DEBUG oslo_vmware.api [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774579, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.572624] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1719.572624] env[63371]: value = "task-1774580" [ 1719.572624] env[63371]: _type = "Task" [ 1719.572624] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.584258] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774580, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.616078] env[63371]: DEBUG nova.compute.manager [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1719.655896] env[63371]: DEBUG nova.virt.hardware [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1719.656153] env[63371]: DEBUG nova.virt.hardware [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1719.656310] env[63371]: DEBUG nova.virt.hardware [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1719.656488] env[63371]: DEBUG nova.virt.hardware [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1719.656629] env[63371]: DEBUG nova.virt.hardware [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1719.656772] env[63371]: DEBUG nova.virt.hardware [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1719.657138] env[63371]: DEBUG nova.virt.hardware [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1719.657220] env[63371]: DEBUG nova.virt.hardware [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
[ 1719.657403] env[63371]: DEBUG nova.virt.hardware [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1719.657585] env[63371]: DEBUG nova.virt.hardware [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1719.657806] env[63371]: DEBUG nova.virt.hardware [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1719.658919] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c53b24-3606-4e3b-8353-f7eb34c644c7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.665207] env[63371]: DEBUG nova.scheduler.client.report [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 129 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1719.665522] env[63371]: DEBUG nova.compute.provider_tree [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 129 to 130 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1719.665704] env[63371]: DEBUG nova.compute.provider_tree [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1719.677717] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3650609a-fc72-4690-9cfd-5212e962a2e2 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.713776] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774577, 'name': CreateSnapshot_Task, 'duration_secs': 0.941964} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.714073] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1719.714829] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f360c880-004b-4231-9fef-43718c080169 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.893809] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5290b7e8-64ba-8336-7422-ff7f7425dc5d, 'name': SearchDatastore_Task, 'duration_secs': 0.019296} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.894882] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27761aa9-2466-4331-a6d5-c50e8dc2fdf2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.901917] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1719.901917] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52af5dc2-9c22-2796-8212-bfa88ddc88a2" [ 1719.901917] env[63371]: _type = "Task" [ 1719.901917] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.910458] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52af5dc2-9c22-2796-8212-bfa88ddc88a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.069328] env[63371]: DEBUG oslo_vmware.api [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774579, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.083580] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774580, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.171701] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.590s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.175019] env[63371]: DEBUG nova.compute.manager [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1720.176179] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.222s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.176524] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1720.176820] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1720.177231] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.106s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1720.178744] env[63371]: INFO nova.compute.claims [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1720.182094] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca0bc35-735e-4d83-942a-1054ceb08e07 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.192666] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e2aa37-71f6-4736-80eb-2a84f20991b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.212697] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0dcfab-20ae-44b9-9f05-2013e5cd93a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.223354] 
env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1940bb31-032e-45c3-9bf0-b20b69827589 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.235113] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1720.235113] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-543532be-e7b7-43ca-a4a9-94faecbe444a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.266167] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179422MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1720.266356] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1720.268206] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1720.268206] env[63371]: value = "task-1774581" [ 1720.268206] env[63371]: _type = "Task" [ 1720.268206] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.277867] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774581, 'name': CloneVM_Task} progress is 11%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.413780] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52af5dc2-9c22-2796-8212-bfa88ddc88a2, 'name': SearchDatastore_Task, 'duration_secs': 0.100877} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.414066] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.414430] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3a6c12a7-732f-4a73-a8c5-6810b554cc03/3a6c12a7-732f-4a73-a8c5-6810b554cc03.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1720.414724] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de3e2bba-3e73-4d35-97c1-507de23dffc8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.422888] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1720.422888] env[63371]: value = "task-1774582" [ 1720.422888] env[63371]: _type = "Task" [ 1720.422888] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.432966] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774582, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.570752] env[63371]: DEBUG oslo_vmware.api [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774579, 'name': ReconfigVM_Task, 'duration_secs': 0.587769} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.571092] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368419', 'volume_id': '16dfef5a-9dd9-48d8-b733-f65d801d0391', 'name': 'volume-16dfef5a-9dd9-48d8-b733-f65d801d0391', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'b523486c-adae-4322-80be-1f3bf33ca192', 'attached_at': '', 'detached_at': '', 'volume_id': '16dfef5a-9dd9-48d8-b733-f65d801d0391', 'serial': '16dfef5a-9dd9-48d8-b733-f65d801d0391'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1720.584926] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774580, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.683810] env[63371]: DEBUG nova.compute.utils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1720.685272] env[63371]: DEBUG nova.compute.manager [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1720.685449] env[63371]: DEBUG nova.network.neutron [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1720.783595] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774581, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.812990] env[63371]: DEBUG nova.policy [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09ca20017c6245f9bae55080e98de838', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdcb553167e84358b2f89a0eb9fe09ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1720.940724] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774582, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.025341] env[63371]: DEBUG nova.compute.manager [req-d12716ba-fe0e-4f4f-8d78-aeafc9bfeda8 req-4c73847c-16b3-4379-9369-16a4b10fc927 service nova] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Received event network-vif-plugged-5d6f97e2-eb9c-468d-8931-77a4c10ff125 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1721.027779] env[63371]: DEBUG oslo_concurrency.lockutils [req-d12716ba-fe0e-4f4f-8d78-aeafc9bfeda8 req-4c73847c-16b3-4379-9369-16a4b10fc927 service nova] Acquiring lock "11527051-7a4f-481a-b5ed-14550c550c4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.028248] env[63371]: DEBUG oslo_concurrency.lockutils [req-d12716ba-fe0e-4f4f-8d78-aeafc9bfeda8 req-4c73847c-16b3-4379-9369-16a4b10fc927 service nova] Lock "11527051-7a4f-481a-b5ed-14550c550c4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.028248] env[63371]: DEBUG oslo_concurrency.lockutils [req-d12716ba-fe0e-4f4f-8d78-aeafc9bfeda8 req-4c73847c-16b3-4379-9369-16a4b10fc927 service nova] Lock "11527051-7a4f-481a-b5ed-14550c550c4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.028384] env[63371]: DEBUG nova.compute.manager [req-d12716ba-fe0e-4f4f-8d78-aeafc9bfeda8 req-4c73847c-16b3-4379-9369-16a4b10fc927 service nova] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] No waiting events found dispatching network-vif-plugged-5d6f97e2-eb9c-468d-8931-77a4c10ff125 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1721.028548] env[63371]: WARNING nova.compute.manager [req-d12716ba-fe0e-4f4f-8d78-aeafc9bfeda8 req-4c73847c-16b3-4379-9369-16a4b10fc927 service nova] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Received unexpected event network-vif-plugged-5d6f97e2-eb9c-468d-8931-77a4c10ff125 for instance with vm_state building and task_state spawning. 
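The "Policy check for network:attach_external_network failed" entry above is the expected outcome rather than an error: that rule is typically admin-only by default, while the logged request credentials only carry the member and reader roles. A simplified role check against the logged credentials (a sketch only; the real evaluation goes through the deployment's oslo.policy rules, so the admin-only default is an assumption here):

    # Simplified illustration: an admin-only rule evaluated against the credentials
    # captured in the log line above fails because 'admin' is not among the roles.
    creds = {
        'roles': ['member', 'reader'],
        'is_admin': False,
        'project_id': 'cdcb553167e84358b2f89a0eb9fe09ef',
    }

    def role_check(required_role, credentials):
        return required_role in credentials['roles']

    print(role_check('admin', creds))   # False -> logged as a failed policy check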
[ 1721.088077] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774580, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.130469] env[63371]: DEBUG nova.objects.instance [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'flavor' on Instance uuid b523486c-adae-4322-80be-1f3bf33ca192 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1721.190902] env[63371]: DEBUG nova.compute.manager [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1721.283045] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774581, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.332926] env[63371]: DEBUG nova.network.neutron [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Successfully created port: 8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1721.439137] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774582, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.581382] env[63371]: DEBUG nova.network.neutron [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Successfully updated port: 5d6f97e2-eb9c-468d-8931-77a4c10ff125 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1721.588067] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3165a9-a37d-41da-bacb-0f85869de0ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.597666] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774580, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.606570] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9355ea5c-fa99-4037-a09b-b20e46641515 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.612025] env[63371]: DEBUG nova.compute.manager [req-cc38a28f-3b4f-462d-97d0-6148a5160b87 req-57d441e9-b52e-418b-a860-a9e19fc81766 service nova] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Received event network-changed-5d6f97e2-eb9c-468d-8931-77a4c10ff125 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1721.614966] env[63371]: DEBUG nova.compute.manager [req-cc38a28f-3b4f-462d-97d0-6148a5160b87 req-57d441e9-b52e-418b-a860-a9e19fc81766 service nova] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Refreshing instance network info cache due to event network-changed-5d6f97e2-eb9c-468d-8931-77a4c10ff125. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1721.614966] env[63371]: DEBUG oslo_concurrency.lockutils [req-cc38a28f-3b4f-462d-97d0-6148a5160b87 req-57d441e9-b52e-418b-a860-a9e19fc81766 service nova] Acquiring lock "refresh_cache-11527051-7a4f-481a-b5ed-14550c550c4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.614966] env[63371]: DEBUG oslo_concurrency.lockutils [req-cc38a28f-3b4f-462d-97d0-6148a5160b87 req-57d441e9-b52e-418b-a860-a9e19fc81766 service nova] Acquired lock "refresh_cache-11527051-7a4f-481a-b5ed-14550c550c4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.614966] env[63371]: DEBUG nova.network.neutron [req-cc38a28f-3b4f-462d-97d0-6148a5160b87 req-57d441e9-b52e-418b-a860-a9e19fc81766 service nova] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Refreshing network info cache for port 5d6f97e2-eb9c-468d-8931-77a4c10ff125 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1721.653200] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f24f8a-27d9-4d2a-aaeb-ddb395beb609 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.664468] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8fd52ab-d9ee-4a6f-bb43-bc4153da03d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.683244] env[63371]: DEBUG nova.compute.provider_tree [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1721.722225] env[63371]: DEBUG oslo_vmware.rw_handles [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5266a018-103e-787e-cd5b-e6796c59fff8/disk-0.vmdk. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1721.722225] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc643bc-3838-471e-904b-b4cadc2a4dd7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.729626] env[63371]: DEBUG oslo_vmware.rw_handles [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5266a018-103e-787e-cd5b-e6796c59fff8/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1721.729804] env[63371]: ERROR oslo_vmware.rw_handles [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5266a018-103e-787e-cd5b-e6796c59fff8/disk-0.vmdk due to incomplete transfer. [ 1721.730102] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-2e448f7a-61e5-43d0-8c2b-a32337cb6741 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.738595] env[63371]: DEBUG oslo_vmware.rw_handles [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5266a018-103e-787e-cd5b-e6796c59fff8/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1721.738791] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Uploaded image 9e175974-83af-4327-8123-1c04de287277 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1721.741244] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1721.741748] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-863ea25f-a8b8-4f18-bca5-69aceacc2159 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.751288] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1721.751288] env[63371]: value = "task-1774583" [ 1721.751288] env[63371]: _type = "Task" [ 1721.751288] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.766017] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774583, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.781773] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774581, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.908665] env[63371]: DEBUG oslo_concurrency.lockutils [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Acquiring lock "44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.909182] env[63371]: DEBUG oslo_concurrency.lockutils [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Lock "44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.909392] env[63371]: DEBUG oslo_concurrency.lockutils [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Acquiring lock "44a392e4-32c1-4aaf-8dc0-7df50c1a28c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1721.909683] env[63371]: DEBUG oslo_concurrency.lockutils [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Lock "44a392e4-32c1-4aaf-8dc0-7df50c1a28c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.909875] env[63371]: DEBUG oslo_concurrency.lockutils [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Lock "44a392e4-32c1-4aaf-8dc0-7df50c1a28c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.912274] env[63371]: INFO nova.compute.manager [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Terminating instance [ 1721.914458] env[63371]: DEBUG nova.compute.manager [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Start 
destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1721.914643] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1721.915659] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc932376-3013-49d6-9582-d012581c54c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.926850] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1721.931069] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a82c61cc-0ba0-4b31-90e0-47e09d0a3395 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.941608] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774582, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.945233] env[63371]: DEBUG oslo_vmware.api [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Waiting for the task: (returnval){ [ 1721.945233] env[63371]: value = "task-1774584" [ 1721.945233] env[63371]: _type = "Task" [ 1721.945233] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.955177] env[63371]: DEBUG oslo_vmware.api [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774584, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.089306] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "refresh_cache-11527051-7a4f-481a-b5ed-14550c550c4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.089617] env[63371]: DEBUG oslo_vmware.api [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774580, 'name': PowerOnVM_Task, 'duration_secs': 2.400402} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.089864] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1722.090099] env[63371]: INFO nova.compute.manager [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Took 10.69 seconds to spawn the instance on the hypervisor. [ 1722.090311] env[63371]: DEBUG nova.compute.manager [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1722.091229] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db3afb22-58ca-4d72-a545-322e93f178b8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.155095] env[63371]: DEBUG nova.network.neutron [req-cc38a28f-3b4f-462d-97d0-6148a5160b87 req-57d441e9-b52e-418b-a860-a9e19fc81766 service nova] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1722.158205] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c9622c37-fc08-47d7-b98f-466b0e387eeb tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.323s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.187151] env[63371]: DEBUG nova.scheduler.client.report [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1722.214248] env[63371]: DEBUG nova.compute.manager [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1722.238429] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4dcd9461-1d9a-4468-aaaa-9f21b57a98d1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "b523486c-adae-4322-80be-1f3bf33ca192" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.238938] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4dcd9461-1d9a-4468-aaaa-9f21b57a98d1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.239167] env[63371]: DEBUG nova.compute.manager [None req-4dcd9461-1d9a-4468-aaaa-9f21b57a98d1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1722.243508] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b394d4-3596-4f00-82f6-32a056fd3506 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.262906] env[63371]: DEBUG nova.virt.hardware [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1722.263438] env[63371]: DEBUG nova.virt.hardware [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1722.263857] env[63371]: DEBUG nova.virt.hardware [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1722.264121] env[63371]: DEBUG nova.virt.hardware [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Flavor pref 0:0:0 {{(pid=63371) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1722.264406] env[63371]: DEBUG nova.virt.hardware [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1722.264811] env[63371]: DEBUG nova.virt.hardware [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1722.265069] env[63371]: DEBUG nova.virt.hardware [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1722.265292] env[63371]: DEBUG nova.virt.hardware [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1722.266541] env[63371]: DEBUG nova.virt.hardware [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1722.267425] env[63371]: DEBUG nova.virt.hardware [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1722.268038] env[63371]: DEBUG nova.virt.hardware [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1722.268940] env[63371]: DEBUG nova.compute.manager [None req-4dcd9461-1d9a-4468-aaaa-9f21b57a98d1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63371) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1722.269629] env[63371]: DEBUG nova.objects.instance [None req-4dcd9461-1d9a-4468-aaaa-9f21b57a98d1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'flavor' on Instance uuid b523486c-adae-4322-80be-1f3bf33ca192 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1722.272369] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b75eec-73e9-4789-b0d6-b7ea4ad0db77 {{(pid=63371) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.285104] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774583, 'name': Destroy_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.289113] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb8a67bf-52a8-4fa0-871c-ad2f1ccc178c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.298500] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774581, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.300733] env[63371]: DEBUG nova.network.neutron [req-cc38a28f-3b4f-462d-97d0-6148a5160b87 req-57d441e9-b52e-418b-a860-a9e19fc81766 service nova] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.437470] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774582, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.964038} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.437814] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3a6c12a7-732f-4a73-a8c5-6810b554cc03/3a6c12a7-732f-4a73-a8c5-6810b554cc03.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1722.438106] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1722.438418] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-52de29fc-ff59-48e1-9b8c-8af9093476e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.449945] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1722.449945] env[63371]: value = "task-1774585" [ 1722.449945] env[63371]: _type = "Task" [ 1722.449945] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.457403] env[63371]: DEBUG oslo_vmware.api [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774584, 'name': PowerOffVM_Task, 'duration_secs': 0.324335} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.458126] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1722.458346] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1722.458652] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6fd13888-8745-4a36-b861-e596eeac57f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.463587] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774585, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.616521] env[63371]: INFO nova.compute.manager [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Took 29.49 seconds to build instance. 
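The inventory reported above for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 (VCPU total 48 at allocation_ratio 4.0, MEMORY_MB 196590 with 512 reserved, DISK_GB 400) determines the schedulable capacity of each resource class as (total - reserved) * allocation_ratio. A small sketch of that arithmetic using the logged values (an illustration of the formula, not the Placement service itself):

    # Effective capacity per resource class, from the inventory data in the log:
    # capacity = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for resource_class, record in inventory.items():
        capacity = (record['total'] - record['reserved']) * record['allocation_ratio']
        print(resource_class, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0

This is why the claim lines above can admit further instances on the node even though only 48 physical vCPUs are free: the 4.0 allocation ratio raises the schedulable VCPU capacity to 192.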
[ 1722.617437] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1722.617699] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1722.617923] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Deleting the datastore file [datastore1] 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1722.618652] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-044cb688-4cd4-4204-8fa5-07fcebad5d9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.626418] env[63371]: DEBUG oslo_vmware.api [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Waiting for the task: (returnval){ [ 1722.626418] env[63371]: value = "task-1774587" [ 1722.626418] env[63371]: _type = "Task" [ 1722.626418] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.636090] env[63371]: DEBUG oslo_vmware.api [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774587, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.695828] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.516s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1722.695828] env[63371]: DEBUG nova.compute.manager [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1722.697068] env[63371]: DEBUG oslo_concurrency.lockutils [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.401s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1722.697673] env[63371]: DEBUG nova.objects.instance [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lazy-loading 'resources' on Instance uuid 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1722.767153] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774583, 'name': Destroy_Task, 'duration_secs': 0.611915} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.767468] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Destroyed the VM [ 1722.768441] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1722.768441] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fdc8d49e-6fba-429f-a99f-a79d4074fbaf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.783829] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1722.783829] env[63371]: value = "task-1774588" [ 1722.783829] env[63371]: _type = "Task" [ 1722.783829] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.789676] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dcd9461-1d9a-4468-aaaa-9f21b57a98d1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1722.793321] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bdaf0a83-1dd6-4da8-afb6-009d37ede6bc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.795098] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774581, 'name': CloneVM_Task, 'duration_secs': 2.23929} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.795776] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Created linked-clone VM from snapshot [ 1722.796973] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002daeeb-32ec-4c56-8868-ce88eebe845b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.804033] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774588, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.806289] env[63371]: DEBUG oslo_vmware.api [None req-4dcd9461-1d9a-4468-aaaa-9f21b57a98d1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1722.806289] env[63371]: value = "task-1774589" [ 1722.806289] env[63371]: _type = "Task" [ 1722.806289] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.815133] env[63371]: DEBUG oslo_concurrency.lockutils [req-cc38a28f-3b4f-462d-97d0-6148a5160b87 req-57d441e9-b52e-418b-a860-a9e19fc81766 service nova] Releasing lock "refresh_cache-11527051-7a4f-481a-b5ed-14550c550c4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.815589] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Uploading image c1f65783-705a-465c-a7c4-df7ebd09df62 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1722.817979] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquired lock "refresh_cache-11527051-7a4f-481a-b5ed-14550c550c4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.819591] env[63371]: DEBUG nova.network.neutron [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1722.828566] env[63371]: DEBUG oslo_vmware.api [None req-4dcd9461-1d9a-4468-aaaa-9f21b57a98d1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774589, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.840319] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1722.840319] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4577264c-f733-4291-aeea-6f0eda39c6d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.848884] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1722.848884] env[63371]: value = "task-1774590" [ 1722.848884] env[63371]: _type = "Task" [ 1722.848884] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.859028] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774590, 'name': Destroy_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.963156] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774585, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072444} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.963495] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1722.964233] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d0650b-00de-45ef-b7bf-a5449f24d7e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.993695] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 3a6c12a7-732f-4a73-a8c5-6810b554cc03/3a6c12a7-732f-4a73-a8c5-6810b554cc03.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1722.994200] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ad614e5-67ba-4bb6-a9aa-c223fcc7801f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.024856] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1723.024856] env[63371]: value = "task-1774591" [ 1723.024856] env[63371]: _type = "Task" [ 1723.024856] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.035503] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774591, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.116626] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5224db30-f751-4333-bdac-fdd3d81c19ce tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "158259a4-f54a-4192-b235-f03838193516" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.119301] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e9b29aa9-2e10-485f-868d-9185bb5b4a6a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "158259a4-f54a-4192-b235-f03838193516" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.008s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.119567] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5224db30-f751-4333-bdac-fdd3d81c19ce tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "158259a4-f54a-4192-b235-f03838193516" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.003s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.119752] env[63371]: DEBUG nova.compute.manager [None req-5224db30-f751-4333-bdac-fdd3d81c19ce tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1723.120741] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62d0d01d-bfc0-43b7-96ba-38d060b8a855 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.129454] env[63371]: DEBUG nova.compute.manager [None req-5224db30-f751-4333-bdac-fdd3d81c19ce tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63371) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1723.130122] env[63371]: DEBUG nova.objects.instance [None req-5224db30-f751-4333-bdac-fdd3d81c19ce tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lazy-loading 'flavor' on Instance uuid 158259a4-f54a-4192-b235-f03838193516 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1723.141674] env[63371]: DEBUG oslo_vmware.api [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Task: {'id': task-1774587, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.341043} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.141938] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1723.142141] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1723.142354] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1723.142604] env[63371]: INFO nova.compute.manager [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Took 1.23 seconds to destroy the instance on the hypervisor. [ 1723.142937] env[63371]: DEBUG oslo.service.loopingcall [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1723.143107] env[63371]: DEBUG nova.compute.manager [-] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1723.143713] env[63371]: DEBUG nova.network.neutron [-] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1723.180848] env[63371]: DEBUG nova.network.neutron [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Successfully updated port: 8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1723.203764] env[63371]: DEBUG nova.compute.utils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1723.208644] env[63371]: DEBUG nova.compute.manager [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1723.209209] env[63371]: DEBUG nova.network.neutron [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1723.292058] env[63371]: DEBUG nova.policy [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09ca20017c6245f9bae55080e98de838', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cdcb553167e84358b2f89a0eb9fe09ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1723.297681] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774588, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.316453] env[63371]: DEBUG oslo_vmware.api [None req-4dcd9461-1d9a-4468-aaaa-9f21b57a98d1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774589, 'name': PowerOffVM_Task, 'duration_secs': 0.358256} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.316743] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dcd9461-1d9a-4468-aaaa-9f21b57a98d1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1723.316912] env[63371]: DEBUG nova.compute.manager [None req-4dcd9461-1d9a-4468-aaaa-9f21b57a98d1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1723.317713] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e380d9-a23e-458c-a255-9aa7c0f743ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.368173] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774590, 'name': Destroy_Task} progress is 33%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.369529] env[63371]: DEBUG nova.network.neutron [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1723.543086] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774591, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.571388] env[63371]: DEBUG nova.network.neutron [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Updating instance_info_cache with network_info: [{"id": "5d6f97e2-eb9c-468d-8931-77a4c10ff125", "address": "fa:16:3e:12:fb:cf", "network": {"id": "57b9c8c9-145c-4988-8307-0c44f962835e", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-309269171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdcb553167e84358b2f89a0eb9fe09ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d6f97e2-eb", "ovs_interfaceid": "5d6f97e2-eb9c-468d-8931-77a4c10ff125", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.628098] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69506d4a-32b7-4da5-9cc4-fd0144d45232 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.639021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edd02a0-0e51-4b98-879d-7c7f8b223d56 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.642753] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5224db30-f751-4333-bdac-fdd3d81c19ce tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1723.643049] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-12dd1fee-8b2f-4f06-b506-8993a4276874 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.678227] env[63371]: DEBUG oslo_vmware.api [None req-5224db30-f751-4333-bdac-fdd3d81c19ce tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1723.678227] env[63371]: value = "task-1774592" [ 1723.678227] env[63371]: _type = "Task" [ 1723.678227] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.679988] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e080f945-d129-46a6-a3d5-89b0e2772de4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.684496] env[63371]: DEBUG nova.compute.manager [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Received event network-vif-plugged-8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1723.684496] env[63371]: DEBUG oslo_concurrency.lockutils [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] Acquiring lock "f391d4f3-6e9d-4ddc-918a-8dc8581dfc00-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.684496] env[63371]: DEBUG oslo_concurrency.lockutils [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] Lock "f391d4f3-6e9d-4ddc-918a-8dc8581dfc00-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.684496] env[63371]: DEBUG oslo_concurrency.lockutils [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] Lock "f391d4f3-6e9d-4ddc-918a-8dc8581dfc00-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.684496] env[63371]: DEBUG nova.compute.manager [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] No waiting events found dispatching network-vif-plugged-8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1723.684790] env[63371]: WARNING nova.compute.manager [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Received unexpected event network-vif-plugged-8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92 for instance with vm_state building and task_state spawning. 
[ 1723.684884] env[63371]: DEBUG nova.compute.manager [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Received event network-changed-8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1723.685166] env[63371]: DEBUG nova.compute.manager [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Refreshing instance network info cache due to event network-changed-8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1723.685166] env[63371]: DEBUG oslo_concurrency.lockutils [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] Acquiring lock "refresh_cache-f391d4f3-6e9d-4ddc-918a-8dc8581dfc00" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.685517] env[63371]: DEBUG oslo_concurrency.lockutils [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] Acquired lock "refresh_cache-f391d4f3-6e9d-4ddc-918a-8dc8581dfc00" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.685517] env[63371]: DEBUG nova.network.neutron [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Refreshing network info cache for port 8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1723.691016] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "refresh_cache-f391d4f3-6e9d-4ddc-918a-8dc8581dfc00" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.704358] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24f5d44-dcc3-4625-93fa-4ce1ea1e94b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.709559] env[63371]: DEBUG oslo_vmware.api [None req-5224db30-f751-4333-bdac-fdd3d81c19ce tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774592, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.711280] env[63371]: DEBUG nova.compute.manager [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1723.730516] env[63371]: DEBUG nova.compute.provider_tree [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1723.796607] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774588, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.836815] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4dcd9461-1d9a-4468-aaaa-9f21b57a98d1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.598s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.869324] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774590, 'name': Destroy_Task, 'duration_secs': 0.938552} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.870558] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Destroyed the VM [ 1723.870809] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1723.871106] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-25975d15-e12c-444e-a4dd-f8c3723e06bd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.884025] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1723.884025] env[63371]: value = "task-1774593" [ 1723.884025] env[63371]: _type = "Task" [ 1723.884025] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.894113] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774593, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.011221] env[63371]: DEBUG nova.network.neutron [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Successfully created port: adaefbec-4084-4f4d-8db6-b7f5ff8df5ea {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1724.040055] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774591, 'name': ReconfigVM_Task, 'duration_secs': 0.772035} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.040055] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 3a6c12a7-732f-4a73-a8c5-6810b554cc03/3a6c12a7-732f-4a73-a8c5-6810b554cc03.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1724.040667] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd8bf44d-3a2f-4d3b-9181-3829ace2d449 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.048366] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1724.048366] env[63371]: value = "task-1774594" [ 1724.048366] env[63371]: _type = "Task" [ 1724.048366] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.058548] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774594, 'name': Rename_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.074269] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Releasing lock "refresh_cache-11527051-7a4f-481a-b5ed-14550c550c4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.074649] env[63371]: DEBUG nova.compute.manager [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Instance network_info: |[{"id": "5d6f97e2-eb9c-468d-8931-77a4c10ff125", "address": "fa:16:3e:12:fb:cf", "network": {"id": "57b9c8c9-145c-4988-8307-0c44f962835e", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-309269171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdcb553167e84358b2f89a0eb9fe09ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d6f97e2-eb", "ovs_interfaceid": "5d6f97e2-eb9c-468d-8931-77a4c10ff125", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1724.075238] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:fb:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd733acc2-07d0-479e-918c-ec8a21925389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d6f97e2-eb9c-468d-8931-77a4c10ff125', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1724.087634] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Creating folder: Project (cdcb553167e84358b2f89a0eb9fe09ef). Parent ref: group-v368199. 
{{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1724.087634] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-380ecfba-9e52-45e2-9ae2-be6ae38a4029 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.099453] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Created folder: Project (cdcb553167e84358b2f89a0eb9fe09ef) in parent group-v368199. [ 1724.099699] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Creating folder: Instances. Parent ref: group-v368449. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1724.099987] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-96df4ecb-34a9-49be-9987-bb180c56fa02 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.113053] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Created folder: Instances in parent group-v368449. [ 1724.113130] env[63371]: DEBUG oslo.service.loopingcall [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1724.113326] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1724.113555] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fcabeda-b4c8-4488-af2a-0850d9ace1fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.135877] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1724.135877] env[63371]: value = "task-1774597" [ 1724.135877] env[63371]: _type = "Task" [ 1724.135877] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.150975] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774597, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.198560] env[63371]: DEBUG oslo_vmware.api [None req-5224db30-f751-4333-bdac-fdd3d81c19ce tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774592, 'name': PowerOffVM_Task, 'duration_secs': 0.239086} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.198897] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5224db30-f751-4333-bdac-fdd3d81c19ce tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1724.199105] env[63371]: DEBUG nova.compute.manager [None req-5224db30-f751-4333-bdac-fdd3d81c19ce tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1724.199983] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b075e7b-19d8-47c7-b064-28aa9cac3b9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.234027] env[63371]: DEBUG nova.scheduler.client.report [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1724.268286] env[63371]: DEBUG nova.network.neutron [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1724.299441] env[63371]: DEBUG oslo_vmware.api [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774588, 'name': RemoveSnapshot_Task, 'duration_secs': 1.123015} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.299736] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1724.299975] env[63371]: INFO nova.compute.manager [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Took 17.19 seconds to snapshot the instance on the hypervisor. [ 1724.398644] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774593, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.439764] env[63371]: DEBUG nova.network.neutron [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1724.465712] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquiring lock "e16e4a55-4198-4308-b12c-d9ac07daecad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1724.465712] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Lock "e16e4a55-4198-4308-b12c-d9ac07daecad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.558762] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774594, 'name': Rename_Task, 'duration_secs': 0.175778} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.559055] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1724.559311] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b417842-e94a-4824-8638-90b8d5a6a9f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.567139] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1724.567139] env[63371]: value = "task-1774598" [ 1724.567139] env[63371]: _type = "Task" [ 1724.567139] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.579754] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774598, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.612728] env[63371]: DEBUG nova.network.neutron [-] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1724.647755] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774597, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.718130] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5224db30-f751-4333-bdac-fdd3d81c19ce tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "158259a4-f54a-4192-b235-f03838193516" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.598s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.725157] env[63371]: DEBUG nova.compute.manager [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1724.739282] env[63371]: DEBUG oslo_concurrency.lockutils [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.042s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.742805] env[63371]: DEBUG oslo_concurrency.lockutils [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.455s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1724.742805] env[63371]: DEBUG nova.objects.instance [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63371) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1724.756556] env[63371]: DEBUG nova.virt.hardware [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1724.756786] env[63371]: DEBUG nova.virt.hardware [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1724.757420] env[63371]: DEBUG nova.virt.hardware [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1724.757420] env[63371]: DEBUG nova.virt.hardware [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1724.757420] env[63371]: DEBUG nova.virt.hardware [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1724.757420] env[63371]: DEBUG nova.virt.hardware [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1724.757650] env[63371]: DEBUG nova.virt.hardware [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1724.757820] env[63371]: DEBUG nova.virt.hardware [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1724.757954] env[63371]: DEBUG nova.virt.hardware [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1724.758242] env[63371]: DEBUG nova.virt.hardware [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:575}} [ 1724.758305] env[63371]: DEBUG nova.virt.hardware [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1724.759665] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8cec85c-ae91-40d8-94a8-4eb297ddf300 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.769123] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc03ec62-fa01-4926-89da-2c58b9ab0c6c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.774828] env[63371]: INFO nova.scheduler.client.report [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Deleted allocations for instance 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed [ 1724.854106] env[63371]: DEBUG nova.compute.manager [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Found 3 images (rotation: 2) {{(pid=63371) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4555}} [ 1724.854289] env[63371]: DEBUG nova.compute.manager [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Rotating out 1 backups {{(pid=63371) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4563}} [ 1724.854502] env[63371]: DEBUG nova.compute.manager [None req-37f47841-fbc9-4bc9-8448-db6aeb77185c tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Deleting image 92bc8616-7d1c-4553-a5b6-e579f1683538 {{(pid=63371) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4568}} [ 1724.894060] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774593, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.932698] env[63371]: DEBUG nova.objects.instance [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'flavor' on Instance uuid b523486c-adae-4322-80be-1f3bf33ca192 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1724.942493] env[63371]: DEBUG oslo_concurrency.lockutils [req-d279aa57-aca4-4143-99a0-bafdf195d6a0 req-5f501dc8-4f73-482e-b17f-c1c8a8693c4a service nova] Releasing lock "refresh_cache-f391d4f3-6e9d-4ddc-918a-8dc8581dfc00" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1724.943029] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquired lock "refresh_cache-f391d4f3-6e9d-4ddc-918a-8dc8581dfc00" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.943276] env[63371]: DEBUG nova.network.neutron [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1724.967535] env[63371]: DEBUG nova.compute.manager [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1725.081750] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774598, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.115705] env[63371]: INFO nova.compute.manager [-] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Took 1.97 seconds to deallocate network for instance. [ 1725.149112] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774597, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.283210] env[63371]: DEBUG oslo_concurrency.lockutils [None req-825c7855-0949-4414-a755-c6fa79c4d3bd tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.478s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.394847] env[63371]: DEBUG oslo_vmware.api [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774593, 'name': RemoveSnapshot_Task, 'duration_secs': 1.146841} completed successfully. 
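The _rotate_backups lines a little earlier report "Found 3 images (rotation: 2)" and then rotate one backup out. The arithmetic is simply "keep the newest N, delete the rest"; the sketch below illustrates it with a hypothetical Image record and delete_image() helper rather than the real Nova or Glance calls.

```python
# Hypothetical Image record and delete_image() helper; only the keep-newest-N
# arithmetic mirrors the log ("Found 3 images (rotation: 2)" -> "Rotating out 1 backups").
from dataclasses import dataclass
from datetime import datetime

@dataclass
class Image:
    id: str
    created_at: datetime

def select_backups_to_rotate(images, rotation):
    """Keep the `rotation` newest backups and return the rest, oldest first."""
    newest_first = sorted(images, key=lambda img: img.created_at, reverse=True)
    return list(reversed(newest_first[rotation:]))

def delete_image(image):
    print(f"Deleting image {image.id}")  # stand-in for the image delete call

if __name__ == "__main__":
    backups = [
        Image("backup-1", datetime(2024, 12, 1)),
        Image("backup-2", datetime(2024, 12, 5)),
        Image("backup-3", datetime(2024, 12, 10)),
    ]
    for image in select_backups_to_rotate(backups, rotation=2):
        delete_image(image)  # exactly one image is rotated out, as in the log
```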
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.395896] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1725.438670] env[63371]: DEBUG oslo_concurrency.lockutils [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.438670] env[63371]: DEBUG oslo_concurrency.lockutils [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.438670] env[63371]: DEBUG nova.network.neutron [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1725.438841] env[63371]: DEBUG nova.objects.instance [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'info_cache' on Instance uuid b523486c-adae-4322-80be-1f3bf33ca192 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1725.489995] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.494263] env[63371]: DEBUG nova.network.neutron [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1725.579267] env[63371]: DEBUG oslo_vmware.api [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774598, 'name': PowerOnVM_Task, 'duration_secs': 0.670868} completed successfully. 
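The 'Acquiring/Acquired/Releasing lock "refresh_cache-<uuid>"' entries show per-instance named locks serializing network-info cache refreshes. Below is a toy threading-based stand-in for that pattern; the real code uses oslo.concurrency's lockutils, not this helper.

```python
# Toy threading-based stand-in for the named-lock pattern; the real code uses
# oslo.concurrency lockutils, and the lock name below copies the log's convention.
import threading
from collections import defaultdict
from contextlib import contextmanager

_locks = defaultdict(threading.Lock)  # one lock object per lock name

@contextmanager
def named_lock(name):
    print(f'Acquiring lock "{name}"')
    with _locks[name]:
        print(f'Acquired lock "{name}"')
        yield
    print(f'Releasing lock "{name}"')

if __name__ == "__main__":
    with named_lock("refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192"):
        pass  # rebuild this instance's network info cache while holding the lock
```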
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.579572] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1725.579779] env[63371]: INFO nova.compute.manager [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Took 11.62 seconds to spawn the instance on the hypervisor. [ 1725.579952] env[63371]: DEBUG nova.compute.manager [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1725.580786] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728cc236-6bf1-42c9-9e2e-24530a11ff47 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.623518] env[63371]: DEBUG oslo_concurrency.lockutils [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.647591] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774597, 'name': CreateVM_Task} progress is 99%. 
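The repeated "Task: {...} progress is N%" entries come from polling a vCenter task until it finishes. A generic poll loop in that spirit is sketched below; fetch_task_info() is a hypothetical callable returning (state, progress), and this is not the oslo.vmware wait_for_task implementation.

```python
# Generic poll-until-done loop; fetch_task_info() is a hypothetical callable and
# the loop is illustrative, not the oslo.vmware API.
import time

def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it succeeds; raise on failure or timeout."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = fetch_task_info()
        print(f"progress is {progress}%")
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")

if __name__ == "__main__":
    states = iter([("running", 25), ("running", 89), ("running", 99), ("success", 100)])
    wait_for_task(lambda: next(states), poll_interval=0.01)
```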
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.734223] env[63371]: DEBUG nova.compute.manager [req-6c391361-4e4c-49d5-9dcd-92398f9df825 req-63e69551-b1ff-4c31-8aae-8a12070a90b6 service nova] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Received event network-vif-deleted-b12875f1-bdc7-4980-9223-c0a2fee47b86 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1725.759458] env[63371]: DEBUG nova.network.neutron [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Updating instance_info_cache with network_info: [{"id": "8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92", "address": "fa:16:3e:f1:41:ff", "network": {"id": "57b9c8c9-145c-4988-8307-0c44f962835e", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-309269171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdcb553167e84358b2f89a0eb9fe09ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8667cc0b-44", "ovs_interfaceid": "8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.761034] env[63371]: INFO nova.compute.manager [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Rebuilding instance [ 1725.763637] env[63371]: DEBUG oslo_concurrency.lockutils [None req-124f3cb3-9bd7-4940-abce-ff3669b21685 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.765011] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.712s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.765453] env[63371]: DEBUG nova.objects.instance [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lazy-loading 'resources' on Instance uuid 9985dbcd-4498-4629-aae5-5e1933307c50 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1725.815906] env[63371]: 
DEBUG nova.compute.manager [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1725.816870] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbe8f8f-7f29-45c7-a89b-ea6a5a611a05 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.901533] env[63371]: WARNING nova.compute.manager [None req-0dbffa55-391e-463b-b80f-c653c3c58bcb tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Image not found during snapshot: nova.exception.ImageNotFound: Image c1f65783-705a-465c-a7c4-df7ebd09df62 could not be found. [ 1725.942144] env[63371]: DEBUG nova.objects.base [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1726.098637] env[63371]: INFO nova.compute.manager [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Took 32.20 seconds to build instance. [ 1726.150981] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774597, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.264465] env[63371]: DEBUG nova.network.neutron [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Successfully updated port: adaefbec-4084-4f4d-8db6-b7f5ff8df5ea {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1726.265500] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Releasing lock "refresh_cache-f391d4f3-6e9d-4ddc-918a-8dc8581dfc00" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.265690] env[63371]: DEBUG nova.compute.manager [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Instance network_info: |[{"id": "8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92", "address": "fa:16:3e:f1:41:ff", "network": {"id": "57b9c8c9-145c-4988-8307-0c44f962835e", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-309269171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdcb553167e84358b2f89a0eb9fe09ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8667cc0b-44", "ovs_interfaceid": "8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1726.266124] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:41:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd733acc2-07d0-479e-918c-ec8a21925389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1726.278793] env[63371]: DEBUG oslo.service.loopingcall [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1726.281148] env[63371]: DEBUG nova.objects.instance [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lazy-loading 'numa_topology' on Instance uuid 9985dbcd-4498-4629-aae5-5e1933307c50 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1726.286307] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1726.287466] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-691679e0-0da0-4860-9f95-a78b64c4e7d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.317478] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1726.317478] env[63371]: value = "task-1774599" [ 1726.317478] env[63371]: _type = "Task" [ 1726.317478] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.326476] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774599, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.333523] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1726.333837] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c149e59-37aa-41f3-8446-bec3f6c28274 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.341872] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1726.341872] env[63371]: value = "task-1774600" [ 1726.341872] env[63371]: _type = "Task" [ 1726.341872] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.353334] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1726.353617] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1726.354417] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ed2a4b-5f89-4eab-9aba-8efc97f76f57 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.363071] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1726.363284] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85f5e9df-6042-4eb8-884d-b71ca35e66e7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.387417] env[63371]: DEBUG oslo_concurrency.lockutils [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "935cf583-ecde-4a10-a773-6ff765e5bb49" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.387670] env[63371]: DEBUG oslo_concurrency.lockutils [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "935cf583-ecde-4a10-a773-6ff765e5bb49" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.387948] env[63371]: DEBUG oslo_concurrency.lockutils [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "935cf583-ecde-4a10-a773-6ff765e5bb49-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.388195] env[63371]: DEBUG oslo_concurrency.lockutils [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "935cf583-ecde-4a10-a773-6ff765e5bb49-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.388461] env[63371]: DEBUG oslo_concurrency.lockutils [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "935cf583-ecde-4a10-a773-6ff765e5bb49-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.391940] env[63371]: INFO nova.compute.manager [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Terminating instance [ 1726.394514] env[63371]: DEBUG nova.compute.manager [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1726.394775] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1726.395720] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69976eb0-99dd-4957-911c-7a6bb2740616 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.405181] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1726.408444] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-24f46cea-edd2-4719-be54-826e945d0127 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.413424] env[63371]: DEBUG oslo_vmware.api [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1726.413424] env[63371]: value = "task-1774602" [ 1726.413424] env[63371]: _type = "Task" [ 1726.413424] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.423560] env[63371]: DEBUG oslo_vmware.api [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774602, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.462028] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1726.462028] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1726.462028] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleting the datastore file [datastore1] 158259a4-f54a-4192-b235-f03838193516 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1726.462028] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d3e31e6-2aa3-4de1-adb1-af8adfd9d379 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.472282] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1726.472282] env[63371]: value = "task-1774603" [ 1726.472282] env[63371]: _type = "Task" [ 1726.472282] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.486638] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774603, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.603390] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0dd1630-697a-4c6d-ac16-caddf035b7ce tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.713s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.652436] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774597, 'name': CreateVM_Task, 'duration_secs': 2.217257} completed successfully. 
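The destroy path for instance 158259a4-... above runs power-off (tolerating "VM already powered off"), UnregisterVM, and then deletes the instance directory from the datastore. The sketch below captures only that ordering with stand-in callables, not the real vmops/vm_util functions.

```python
# Stand-in callables only; in the log the steps correspond to
# vm_util.power_off_instance, VirtualMachine.UnregisterVM and
# FileManager.DeleteDatastoreFile_Task.
def destroy_instance(power_off, unregister, delete_datastore_dir, instance_path):
    try:
        power_off()
    except RuntimeError as exc:
        print(f"ignoring power-off failure: {exc}")  # e.g. "VM already powered off"
    unregister()                          # remove the VM from the vCenter inventory
    delete_datastore_dir(instance_path)   # then delete "[datastore1] <instance uuid>"

def _power_off():
    raise RuntimeError("VM already powered off")

if __name__ == "__main__":
    destroy_instance(
        power_off=_power_off,
        unregister=lambda: print("Unregistered the VM"),
        delete_datastore_dir=lambda path: print(f"Deleted the datastore file {path}"),
        instance_path="[datastore1] 158259a4-f54a-4192-b235-f03838193516",
    )
```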
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.652436] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1726.652436] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.652436] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.652694] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1726.653056] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-269c8e5b-3e17-426a-ab46-b033ac805ebd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.658968] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1726.658968] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5286fa8a-2960-71fb-617e-12e0d601916f" [ 1726.658968] env[63371]: _type = "Task" [ 1726.658968] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.671911] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5286fa8a-2960-71fb-617e-12e0d601916f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.782054] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "refresh_cache-7349ecf6-2de7-4540-b713-7e29cbd3ff0b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.782054] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquired lock "refresh_cache-7349ecf6-2de7-4540-b713-7e29cbd3ff0b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.782503] env[63371]: DEBUG nova.network.neutron [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1726.788702] env[63371]: DEBUG nova.objects.base [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Object Instance<9985dbcd-4498-4629-aae5-5e1933307c50> lazy-loaded attributes: resources,numa_topology {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1726.799944] env[63371]: DEBUG nova.network.neutron [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updating instance_info_cache with network_info: [{"id": "993ff886-27f6-48cd-be00-f0e8d292b060", "address": "fa:16:3e:14:89:81", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap993ff886-27", "ovs_interfaceid": "993ff886-27f6-48cd-be00-f0e8d292b060", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.831528] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774599, 'name': CreateVM_Task, 'duration_secs': 0.416369} completed successfully. 
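The network_info cached above nests subnets inside each VIF and floating IPs inside each fixed IP, which is how the 192.168.128.13 / 10.180.180.232 pair for instance b523486c-... is represented. The accessor below is a stand-in for convenience helpers in nova.network.model, not that API; field names and sample values are copied from the log.

```python
# Stand-in accessor, not the nova.network.model API; field names and the sample
# values come from the cached network_info above.
def addresses(network_info):
    """Return [(fixed_ip, [floating_ips, ...]), ...] for every VIF."""
    result = []
    for vif in network_info:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                floating = [f["address"] for f in ip.get("floating_ips", [])]
                result.append((ip["address"], floating))
    return result

if __name__ == "__main__":
    network_info = [{
        "id": "993ff886-27f6-48cd-be00-f0e8d292b060",
        "network": {"subnets": [{"ips": [{"address": "192.168.128.13",
                                          "floating_ips": [{"address": "10.180.180.232"}]}]}]},
    }]
    print(addresses(network_info))  # [('192.168.128.13', ['10.180.180.232'])]
```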
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.831729] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1726.832464] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.924859] env[63371]: DEBUG oslo_vmware.api [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774602, 'name': PowerOffVM_Task, 'duration_secs': 0.284738} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.925155] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1726.925323] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1726.925585] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0fee0590-21cd-4fd8-b69e-b492b8036b45 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.985585] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774603, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.175415} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.988211] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1726.988407] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1726.988585] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1727.057416] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1727.057645] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1727.057806] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleting the datastore file [datastore1] 935cf583-ecde-4a10-a773-6ff765e5bb49 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1727.058116] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66cc9553-6e16-47b3-9cb1-63a6d74f3ba3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.073206] env[63371]: DEBUG oslo_vmware.api [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for the task: (returnval){ [ 1727.073206] env[63371]: value = "task-1774605" [ 1727.073206] env[63371]: _type = "Task" [ 1727.073206] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.083337] env[63371]: DEBUG oslo_vmware.api [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774605, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.124178] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.124498] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.136269] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c579b231-717a-4450-b4b1-cb8fb7988cb8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.145735] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4449a38-9078-418a-826e-346f80d6ba7d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.180998] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7398e0-007e-4020-9930-8dadee87174f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.194878] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81d05b3-0657-49a3-b3e0-811ee8fec200 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.198959] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5286fa8a-2960-71fb-617e-12e0d601916f, 'name': SearchDatastore_Task, 'duration_secs': 0.020533} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.199279] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.199546] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1727.199745] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.199863] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.200074] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1727.200649] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.200959] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1727.201192] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f63a941e-c7af-4218-b218-e2d4da28e4fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.210849] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb13c378-3b34-4eb7-adbb-0b9c0cd634f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1727.212531] env[63371]: DEBUG nova.compute.provider_tree [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1727.218325] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1727.218325] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52966922-dee7-2a64-7ef5-b6ee0a02afd8" [ 1727.218325] env[63371]: _type = "Task" [ 1727.218325] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.222769] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1727.222938] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1727.224679] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09a8a5de-a632-4f64-819e-fe7ba22a7457 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.231113] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52966922-dee7-2a64-7ef5-b6ee0a02afd8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.235578] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1727.235578] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c5cdee-5211-a153-18fc-ad11225e0f59" [ 1727.235578] env[63371]: _type = "Task" [ 1727.235578] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.244572] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c5cdee-5211-a153-18fc-ad11225e0f59, 'name': SearchDatastore_Task} progress is 0%. 
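The devstack-image-cache_base lines above (create the cache folder if needed, then search the datastore for <image-id>.vmdk) implement a download-once base-image cache. A local-filesystem sketch of that check follows; the per-image lock shown in the log is elided here and fetch_image() is a hypothetical stand-in for the image download.

```python
# Local-filesystem sketch of the check-then-fetch step; the per-image lock from the
# log is elided and fetch_image() is a hypothetical stand-in for the image download.
import os

def ensure_cached_image(cache_dir, image_id, fetch_image):
    os.makedirs(cache_dir, exist_ok=True)               # "Creating directory ... devstack-image-cache_base"
    cached = os.path.join(cache_dir, f"{image_id}.vmdk")
    if os.path.exists(cached):                          # the SearchDatastore_Task step
        return cached                                   # cache hit: reuse the base disk
    fetch_image(image_id, cached)                       # cache miss: download once
    return cached

if __name__ == "__main__":
    path = ensure_cached_image(
        "/tmp/devstack-image-cache_base",
        "1aeb47a7-4e18-481d-b3c0-d33e8c7839d9",
        fetch_image=lambda image_id, dest: open(dest, "wb").close(),
    )
    print(path)
```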
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.304232] env[63371]: DEBUG oslo_concurrency.lockutils [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Releasing lock "refresh_cache-b523486c-adae-4322-80be-1f3bf33ca192" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.327391] env[63371]: DEBUG nova.network.neutron [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1727.501375] env[63371]: DEBUG nova.network.neutron [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Updating instance_info_cache with network_info: [{"id": "adaefbec-4084-4f4d-8db6-b7f5ff8df5ea", "address": "fa:16:3e:96:8c:80", "network": {"id": "57b9c8c9-145c-4988-8307-0c44f962835e", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-309269171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdcb553167e84358b2f89a0eb9fe09ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadaefbec-40", "ovs_interfaceid": "adaefbec-4084-4f4d-8db6-b7f5ff8df5ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1727.584040] env[63371]: DEBUG oslo_vmware.api [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Task: {'id': task-1774605, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.168711} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.584341] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1727.584559] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1727.584753] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1727.584923] env[63371]: INFO nova.compute.manager [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1727.585176] env[63371]: DEBUG oslo.service.loopingcall [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1727.585363] env[63371]: DEBUG nova.compute.manager [-] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1727.585464] env[63371]: DEBUG nova.network.neutron [-] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1727.628933] env[63371]: INFO nova.compute.manager [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Detaching volume 5d39df22-c7dc-4c2a-8bed-1f0a74a568c4 [ 1727.666134] env[63371]: INFO nova.virt.block_device [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Attempting to driver detach volume 5d39df22-c7dc-4c2a-8bed-1f0a74a568c4 from mountpoint /dev/sdb [ 1727.666134] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Volume detach. 
Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1727.666134] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368317', 'volume_id': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'name': 'volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '44cc8606-24f5-4f6b-b96f-3559c9c3f06e', 'attached_at': '', 'detached_at': '', 'volume_id': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'serial': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1727.666663] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ba6780-a203-4fad-94af-8ae396ca5649 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.694154] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35b9585-0ec5-4aa4-860f-5a245eaf03bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.702089] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef8f944-329f-48b6-85fe-d278c3ad6aa9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.724799] env[63371]: DEBUG nova.scheduler.client.report [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1727.733435] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb2a58a-5354-4728-84b8-2fde3b285377 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.744569] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52966922-dee7-2a64-7ef5-b6ee0a02afd8, 'name': SearchDatastore_Task, 'duration_secs': 0.011411} completed successfully. 
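For the inventory report above, Placement treats the schedulable capacity of each resource class as (total - reserved) * allocation_ratio. The numbers below are copied from the log; the helper itself is only illustrative.

```python
# The totals, reservations and ratios below are copied from the report above; the
# helper itself is only illustrative.
def capacity(total, reserved, allocation_ratio):
    """Schedulable capacity of one resource class."""
    return (total - reserved) * allocation_ratio

INVENTORY = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

if __name__ == "__main__":
    for resource_class, inv in INVENTORY.items():
        print(resource_class, capacity(inv["total"], inv["reserved"], inv["allocation_ratio"]))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```

The min_unit, max_unit and step_size fields in the same report additionally constrain any single allocation, which is why a 16-vCPU max_unit can appear alongside a 192-vCPU overall capacity.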
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.756310] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.757201] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1727.757441] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.757799] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] The volume has not been displaced from its original location: [datastore1] volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4/volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4.vmdk. No consolidation needed. {{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1727.763214] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Reconfiguring VM instance instance-0000001f to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1727.767837] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed36f43b-09fc-46ab-bb1a-0fb626a08c26 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.783554] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c5cdee-5211-a153-18fc-ad11225e0f59, 'name': SearchDatastore_Task, 'duration_secs': 0.008362} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.786871] env[63371]: DEBUG nova.compute.manager [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Received event network-vif-plugged-adaefbec-4084-4f4d-8db6-b7f5ff8df5ea {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1727.787119] env[63371]: DEBUG oslo_concurrency.lockutils [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] Acquiring lock "7349ecf6-2de7-4540-b713-7e29cbd3ff0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.787464] env[63371]: DEBUG oslo_concurrency.lockutils [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] Lock "7349ecf6-2de7-4540-b713-7e29cbd3ff0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.787662] env[63371]: DEBUG oslo_concurrency.lockutils [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] Lock "7349ecf6-2de7-4540-b713-7e29cbd3ff0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.788439] env[63371]: DEBUG nova.compute.manager [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] No waiting events found dispatching network-vif-plugged-adaefbec-4084-4f4d-8db6-b7f5ff8df5ea {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1727.788439] env[63371]: WARNING nova.compute.manager [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Received unexpected event network-vif-plugged-adaefbec-4084-4f4d-8db6-b7f5ff8df5ea for instance with vm_state building and task_state spawning. [ 1727.788439] env[63371]: DEBUG nova.compute.manager [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Received event network-changed-adaefbec-4084-4f4d-8db6-b7f5ff8df5ea {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1727.788439] env[63371]: DEBUG nova.compute.manager [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Refreshing instance network info cache due to event network-changed-adaefbec-4084-4f4d-8db6-b7f5ff8df5ea. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1727.788439] env[63371]: DEBUG oslo_concurrency.lockutils [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] Acquiring lock "refresh_cache-7349ecf6-2de7-4540-b713-7e29cbd3ff0b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.789560] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71951abc-e186-4be7-ae32-6258736de919 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.796128] env[63371]: DEBUG oslo_vmware.api [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1727.796128] env[63371]: value = "task-1774606" [ 1727.796128] env[63371]: _type = "Task" [ 1727.796128] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.798165] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1727.798165] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5278ddd7-7f94-303b-620d-264145cc86c5" [ 1727.798165] env[63371]: _type = "Task" [ 1727.798165] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.806372] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1727.807490] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ce6b767-6dff-405c-a959-75eff79a859d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.815466] env[63371]: DEBUG oslo_vmware.api [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774606, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.815728] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5278ddd7-7f94-303b-620d-264145cc86c5, 'name': SearchDatastore_Task, 'duration_secs': 0.010906} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.816388] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.816665] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 11527051-7a4f-481a-b5ed-14550c550c4e/11527051-7a4f-481a-b5ed-14550c550c4e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1727.816945] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.817181] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1727.817362] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-725cb9f9-3b32-475e-bedc-ad0356b7c935 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.823028] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95a1bd6b-0207-4dbf-9fc6-39fe7e2e7bf9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.823869] env[63371]: DEBUG oslo_vmware.api [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1727.823869] env[63371]: value = "task-1774607" [ 1727.823869] env[63371]: _type = "Task" [ 1727.823869] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.830108] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1727.830108] env[63371]: value = "task-1774608" [ 1727.830108] env[63371]: _type = "Task" [ 1727.830108] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.833864] env[63371]: DEBUG oslo_vmware.api [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774607, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.838054] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1727.840022] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1727.840022] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6245acc6-8888-4239-89c7-42830782e689 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.848109] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774608, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.848983] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1727.848983] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52450850-f376-768d-a458-25c7f8e5a5df" [ 1727.848983] env[63371]: _type = "Task" [ 1727.848983] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.860046] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52450850-f376-768d-a458-25c7f8e5a5df, 'name': SearchDatastore_Task, 'duration_secs': 0.011102} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.860876] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf7ffd04-16b4-4709-b249-9687829e45cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.871302] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1727.871302] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]529d3014-5052-32cd-e0f4-7cec02242383" [ 1727.871302] env[63371]: _type = "Task" [ 1727.871302] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.881577] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529d3014-5052-32cd-e0f4-7cec02242383, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.987228] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8e45d9a7-0af9-46f8-8c86-9a5606e3e8ce tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1727.987228] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8e45d9a7-0af9-46f8-8c86-9a5606e3e8ce tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.987228] env[63371]: DEBUG nova.compute.manager [None req-8e45d9a7-0af9-46f8-8c86-9a5606e3e8ce tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1727.988021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a8c177b-2a29-4d56-92a8-410a68a33d11 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.995757] env[63371]: DEBUG nova.compute.manager [None req-8e45d9a7-0af9-46f8-8c86-9a5606e3e8ce tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63371) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1727.996494] env[63371]: DEBUG nova.objects.instance [None req-8e45d9a7-0af9-46f8-8c86-9a5606e3e8ce tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 
'flavor' on Instance uuid 9862b0f0-ccf6-4e69-9e78-cf864adaa65e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1728.004128] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Releasing lock "refresh_cache-7349ecf6-2de7-4540-b713-7e29cbd3ff0b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.004128] env[63371]: DEBUG nova.compute.manager [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Instance network_info: |[{"id": "adaefbec-4084-4f4d-8db6-b7f5ff8df5ea", "address": "fa:16:3e:96:8c:80", "network": {"id": "57b9c8c9-145c-4988-8307-0c44f962835e", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-309269171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdcb553167e84358b2f89a0eb9fe09ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadaefbec-40", "ovs_interfaceid": "adaefbec-4084-4f4d-8db6-b7f5ff8df5ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1728.004422] env[63371]: DEBUG oslo_concurrency.lockutils [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] Acquired lock "refresh_cache-7349ecf6-2de7-4540-b713-7e29cbd3ff0b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1728.005070] env[63371]: DEBUG nova.network.neutron [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Refreshing network info cache for port adaefbec-4084-4f4d-8db6-b7f5ff8df5ea {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1728.006156] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:8c:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd733acc2-07d0-479e-918c-ec8a21925389', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'adaefbec-4084-4f4d-8db6-b7f5ff8df5ea', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1728.014497] env[63371]: DEBUG oslo.service.loopingcall [None req-51da4838-bdd7-4055-ad8c-8bef1549872e 
tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1728.016244] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1728.017399] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c0db9829-cece-4e5a-b85b-9b31fc60775b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.042691] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1728.042691] env[63371]: value = "task-1774609" [ 1728.042691] env[63371]: _type = "Task" [ 1728.042691] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.045153] env[63371]: DEBUG nova.virt.hardware [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1728.045418] env[63371]: DEBUG nova.virt.hardware [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1728.045575] env[63371]: DEBUG nova.virt.hardware [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1728.045754] env[63371]: DEBUG nova.virt.hardware [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1728.045913] env[63371]: DEBUG nova.virt.hardware [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1728.046092] env[63371]: DEBUG nova.virt.hardware [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 
tempest-ServerActionsTestOtherA-1526347176-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1728.046331] env[63371]: DEBUG nova.virt.hardware [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1728.046494] env[63371]: DEBUG nova.virt.hardware [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1728.046661] env[63371]: DEBUG nova.virt.hardware [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1728.046818] env[63371]: DEBUG nova.virt.hardware [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1728.046981] env[63371]: DEBUG nova.virt.hardware [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1728.047922] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-079ec647-a738-4bae-a658-588c7953b808 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.064442] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f9cf77-779f-4d32-ad81-090ba5f5c8aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.069095] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774609, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.082177] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:6f:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6fb0104-186b-4288-b87e-634893f46f01', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0169bee8-0cc2-4add-b53b-0dfecac574d2', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1728.091370] env[63371]: DEBUG oslo.service.loopingcall [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1728.091722] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 158259a4-f54a-4192-b235-f03838193516] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1728.092008] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-847d30ec-27d5-49f9-82c8-1bdad44afe5b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.112268] env[63371]: DEBUG nova.compute.manager [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Stashing vm_state: active {{(pid=63371) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1728.118192] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1728.118192] env[63371]: value = "task-1774610" [ 1728.118192] env[63371]: _type = "Task" [ 1728.118192] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.132121] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774610, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.241029] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.473s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.241029] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.863s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.241219] env[63371]: DEBUG nova.objects.instance [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Lazy-loading 'resources' on Instance uuid 3da99cec-409f-4ea0-891c-2e9d7429674d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1728.310557] env[63371]: DEBUG oslo_vmware.api [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774606, 'name': ReconfigVM_Task, 'duration_secs': 0.289249} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.311211] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Reconfigured VM instance instance-0000001f to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1728.315935] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2acfd4ec-74dc-4f01-b0aa-af7724f4c7b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.340655] env[63371]: DEBUG oslo_vmware.api [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774607, 'name': PowerOnVM_Task} progress is 76%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.343326] env[63371]: DEBUG oslo_vmware.api [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1728.343326] env[63371]: value = "task-1774611" [ 1728.343326] env[63371]: _type = "Task" [ 1728.343326] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.351221] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774608, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.355594] env[63371]: DEBUG nova.network.neutron [-] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.361632] env[63371]: DEBUG oslo_vmware.api [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774611, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.382039] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529d3014-5052-32cd-e0f4-7cec02242383, 'name': SearchDatastore_Task, 'duration_secs': 0.03861} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.382774] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.383063] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] f391d4f3-6e9d-4ddc-918a-8dc8581dfc00/f391d4f3-6e9d-4ddc-918a-8dc8581dfc00.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1728.383356] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7373dd9d-1571-4c55-b491-9f067b048fd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.391017] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1728.391017] env[63371]: value = "task-1774612" [ 1728.391017] env[63371]: _type = "Task" [ 1728.391017] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.400879] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774612, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.504261] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e45d9a7-0af9-46f8-8c86-9a5606e3e8ce tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1728.504490] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9743abed-b277-47a5-a283-9075f284b33a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.512729] env[63371]: DEBUG oslo_vmware.api [None req-8e45d9a7-0af9-46f8-8c86-9a5606e3e8ce tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1728.512729] env[63371]: value = "task-1774613" [ 1728.512729] env[63371]: _type = "Task" [ 1728.512729] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.528614] env[63371]: DEBUG oslo_vmware.api [None req-8e45d9a7-0af9-46f8-8c86-9a5606e3e8ce tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774613, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.570622] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774609, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.629852] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774610, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.632538] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.749081] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9690fa0c-00f5-4d44-99e7-12535b3a34e7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "9985dbcd-4498-4629-aae5-5e1933307c50" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 42.701s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.750252] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "9985dbcd-4498-4629-aae5-5e1933307c50" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 19.291s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1728.750495] env[63371]: INFO nova.compute.manager [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Unshelving [ 1728.834710] env[63371]: DEBUG nova.network.neutron [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Updated VIF entry in instance network info cache for port adaefbec-4084-4f4d-8db6-b7f5ff8df5ea. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1728.835207] env[63371]: DEBUG nova.network.neutron [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Updating instance_info_cache with network_info: [{"id": "adaefbec-4084-4f4d-8db6-b7f5ff8df5ea", "address": "fa:16:3e:96:8c:80", "network": {"id": "57b9c8c9-145c-4988-8307-0c44f962835e", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-309269171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdcb553167e84358b2f89a0eb9fe09ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapadaefbec-40", "ovs_interfaceid": "adaefbec-4084-4f4d-8db6-b7f5ff8df5ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1728.840513] env[63371]: DEBUG oslo_vmware.api [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774607, 'name': PowerOnVM_Task, 'duration_secs': 0.712359} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.844217] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1728.844453] env[63371]: DEBUG nova.compute.manager [None req-078bbaf8-b04b-4212-b070-572b57217921 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1728.846204] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67964644-6e12-4f2a-a793-d6bd94cc3c57 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.862066] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774608, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557669} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.862965] env[63371]: INFO nova.compute.manager [-] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Took 1.28 seconds to deallocate network for instance. 
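Editor's note: the repeated "Waiting for the task ... to complete" / "_poll_task ... progress is N%" entries above (ReconfigVM_Task, PowerOnVM_Task, CopyVirtualDisk_Task, CreateVM_Task) are produced by oslo.vmware's session task polling. The following is only a minimal illustrative sketch of that invoke-and-wait pattern, not Nova's actual driver code; the vCenter host, credentials, and the config spec passed in are placeholder assumptions, not values taken from this log.

    # Sketch of the oslo.vmware invoke-and-wait pattern behind the
    # wait_for_task/_poll_task lines above (illustrative only).
    from oslo_vmware import api as vmware_api

    # Placeholder endpoint and credentials (assumptions, not from this log).
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org',
        'administrator@vsphere.local',
        'secret',
        api_retry_count=10,
        task_poll_interval=0.5)  # poll cadence behind the "progress is N%" lines

    def reconfig_vm(vm_ref, config_spec):
        # Invoking VirtualMachine.ReconfigVM_Task returns a task reference
        # (e.g. the 'task-...' ids seen above); wait_for_task() then polls
        # the task until it succeeds or raises on failure.
        task = session.invoke_api(session.vim, 'ReconfigVM_Task',
                                  vm_ref, spec=config_spec)
        return session.wait_for_task(task)

The same pattern applies to the other tasks in this log (power on/off, disk copy/extend, VM creation); only the invoked vSphere method and its arguments differ.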
[ 1728.863278] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 11527051-7a4f-481a-b5ed-14550c550c4e/11527051-7a4f-481a-b5ed-14550c550c4e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1728.863523] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1728.865722] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7390772-9a8a-4496-a4b0-1c42d047bfe4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.881847] env[63371]: DEBUG oslo_vmware.api [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774611, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.890454] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1728.890454] env[63371]: value = "task-1774614" [ 1728.890454] env[63371]: _type = "Task" [ 1728.890454] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.917224] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774614, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.918099] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774612, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.025516] env[63371]: DEBUG oslo_vmware.api [None req-8e45d9a7-0af9-46f8-8c86-9a5606e3e8ce tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774613, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.064128] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Acquiring lock "c04edf6d-8a07-4776-be0f-b763fb3059d2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.064128] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Lock "c04edf6d-8a07-4776-be0f-b763fb3059d2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.064302] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Acquiring lock "c04edf6d-8a07-4776-be0f-b763fb3059d2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.064506] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Lock "c04edf6d-8a07-4776-be0f-b763fb3059d2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.064681] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Lock "c04edf6d-8a07-4776-be0f-b763fb3059d2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.066522] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774609, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.069343] env[63371]: INFO nova.compute.manager [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Terminating instance [ 1729.071695] env[63371]: DEBUG nova.compute.manager [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1729.071936] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1729.072901] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b90b949-3d03-4b96-a490-a8f3592ad583 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.085324] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1729.085483] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85d172cc-d3d5-43ec-8533-284736c38524 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.093763] env[63371]: DEBUG oslo_vmware.api [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Waiting for the task: (returnval){ [ 1729.093763] env[63371]: value = "task-1774615" [ 1729.093763] env[63371]: _type = "Task" [ 1729.093763] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.103983] env[63371]: DEBUG oslo_vmware.api [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774615, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.128637] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774610, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.157322] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b53f3a-e125-4b84-8f18-7c3f0545a03e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.165883] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f989bd46-4fd9-4aac-9d30-9ac8c60c10b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.201691] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0facad54-0831-42ec-a5a3-0dfb83070937 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.214839] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd9db68-8af1-4d03-aa62-418611924381 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.235345] env[63371]: DEBUG nova.compute.provider_tree [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1729.341401] env[63371]: DEBUG oslo_concurrency.lockutils [req-04e3b1f9-0110-4eb7-ba71-71b0a4a685e6 req-bb538491-a90f-421d-8586-954073e5ed10 service nova] Releasing lock "refresh_cache-7349ecf6-2de7-4540-b713-7e29cbd3ff0b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1729.355777] env[63371]: DEBUG oslo_vmware.api [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774611, 'name': ReconfigVM_Task, 'duration_secs': 0.93034} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.356087] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368317', 'volume_id': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'name': 'volume-5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '44cc8606-24f5-4f6b-b96f-3559c9c3f06e', 'attached_at': '', 'detached_at': '', 'volume_id': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4', 'serial': '5d39df22-c7dc-4c2a-8bed-1f0a74a568c4'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1729.383057] env[63371]: DEBUG oslo_concurrency.lockutils [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.402660] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774614, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.219131} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.405923] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1729.406711] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3d4a5d-066e-4759-8035-e854b4a446a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.415404] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774612, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.717879} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.424493] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] f391d4f3-6e9d-4ddc-918a-8dc8581dfc00/f391d4f3-6e9d-4ddc-918a-8dc8581dfc00.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1729.424683] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1729.433738] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 11527051-7a4f-481a-b5ed-14550c550c4e/11527051-7a4f-481a-b5ed-14550c550c4e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1729.433986] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-85330813-d6db-4f37-8a51-fc36aab70352 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.435897] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a4426d5-24e5-4c2b-9bbc-7504bdd62cde {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.456749] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1729.456749] env[63371]: value = "task-1774616" [ 1729.456749] env[63371]: _type = "Task" [ 1729.456749] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.459521] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1729.459521] env[63371]: value = "task-1774617" [ 1729.459521] env[63371]: _type = "Task" [ 1729.459521] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.470432] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774617, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.473707] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774616, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.524932] env[63371]: DEBUG oslo_vmware.api [None req-8e45d9a7-0af9-46f8-8c86-9a5606e3e8ce tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774613, 'name': PowerOffVM_Task, 'duration_secs': 0.79174} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.525267] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e45d9a7-0af9-46f8-8c86-9a5606e3e8ce tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1729.525464] env[63371]: DEBUG nova.compute.manager [None req-8e45d9a7-0af9-46f8-8c86-9a5606e3e8ce tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1729.526360] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679e242d-3c92-4f14-b15a-0fd27c71f05c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.560822] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774609, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.607074] env[63371]: DEBUG oslo_vmware.api [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774615, 'name': PowerOffVM_Task, 'duration_secs': 0.243377} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.607074] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1729.607074] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1729.607074] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce3fce5f-29a9-4a03-8567-d31912a89530 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.629186] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774610, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.739783] env[63371]: DEBUG nova.scheduler.client.report [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1729.744800] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1729.745065] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1729.745205] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Deleting the datastore file [datastore1] c04edf6d-8a07-4776-be0f-b763fb3059d2 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1729.745681] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-745af833-f029-4e8e-8f37-ecc977be291b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.752486] env[63371]: DEBUG oslo_vmware.api [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 
tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Waiting for the task: (returnval){ [ 1729.752486] env[63371]: value = "task-1774619" [ 1729.752486] env[63371]: _type = "Task" [ 1729.752486] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.767043] env[63371]: DEBUG oslo_vmware.api [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774619, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.776412] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1729.835145] env[63371]: DEBUG nova.compute.manager [req-3dc97ed5-03ab-43d7-90c9-7c070bd32c84 req-0e9681ef-49b0-4633-9374-c4c8cf81a536 service nova] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Received event network-vif-deleted-0fec120d-e875-4254-bf67-1c749227262a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1729.900712] env[63371]: DEBUG nova.objects.instance [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lazy-loading 'flavor' on Instance uuid 44cc8606-24f5-4f6b-b96f-3559c9c3f06e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1729.974336] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774616, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080319} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.976030] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774617, 'name': ReconfigVM_Task} progress is 14%. 
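Editor's note: the 'Acquiring lock "compute_resources" ... / acquired ... waited / released ... held' lines come from oslo.concurrency's synchronized wrapper (the inner function in lockutils.py). A minimal sketch of that pattern, using a toy class rather than Nova's ResourceTracker:

```python
# Minimal sketch of the oslo.concurrency pattern behind the
# 'Acquiring lock "compute_resources" ... acquired ... released' lines.
# The class below is illustrative only, not Nova's ResourceTracker.
from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'

class ToyResourceTracker(object):

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def instance_claim(self, instance_uuid, vcpus, memory_mb):
        # Everything in here runs with the "compute_resources" lock held,
        # so claims and usage updates never interleave. The wrapper logs
        # how long the caller waited for the lock and how long it was held.
        print('claimed %s: %d vCPU, %d MB' % (instance_uuid, vcpus, memory_mb))

ToyResourceTracker().instance_claim(
    '485a2d6a-1b58-470d-9dc5-8cf31b6726ef', 1, 512)
```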
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.976030] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1729.976030] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2b24a6-a2f3-414a-aaf4-4ca34403d7fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.999133] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] f391d4f3-6e9d-4ddc-918a-8dc8581dfc00/f391d4f3-6e9d-4ddc-918a-8dc8581dfc00.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1729.999385] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a0905d6-0ce4-4685-92ef-a0b3f2b4424f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.021534] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1730.021534] env[63371]: value = "task-1774620" [ 1730.021534] env[63371]: _type = "Task" [ 1730.021534] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.030722] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774620, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.043432] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8e45d9a7-0af9-46f8-8c86-9a5606e3e8ce tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.056s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.061982] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774609, 'name': CreateVM_Task, 'duration_secs': 1.699287} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.065027] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1730.065027] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.065027] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.065027] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1730.065027] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6340e42a-40cb-4cb4-8ab2-d113dc455bdd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.073018] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1730.073018] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]526230b5-2fab-0e7f-b239-98ffd5c2a70f" [ 1730.073018] env[63371]: _type = "Task" [ 1730.073018] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.082718] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526230b5-2fab-0e7f-b239-98ffd5c2a70f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.133265] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774610, 'name': CreateVM_Task, 'duration_secs': 1.626489} completed successfully. 
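Editor's note: the SearchDatastore_Task calls above check whether the cached image already exists under devstack-image-cache_base before copying it again. A hedged sketch of that lookup, assuming an already-resolved datastore-browser reference (ds_browser_ref) and illustrative paths:

```python
# Hedged sketch of the image-cache lookup behind the SearchDatastore_Task
# calls above. ds_browser_ref and the datastore path are placeholders for
# values Nova derives at runtime; error handling is simplified.
def cached_image_exists(session, ds_browser_ref, image_id):
    factory = session.vim.client.factory
    spec = factory.create('ns0:HostDatastoreBrowserSearchSpec')
    spec.matchPattern = ['%s.vmdk' % image_id]

    path = '[datastore1] devstack-image-cache_base/%s' % image_id
    task = session.invoke_api(session.vim, 'SearchDatastore_Task',
                              ds_browser_ref,
                              datastorePath=path, searchSpec=spec)
    try:
        task_info = session.wait_for_task(task)
    except Exception:
        return False  # e.g. the folder does not exist yet -> not cached
    # task_info.result is a HostDatastoreBrowserSearchResults object; a
    # non-empty 'file' list means the cached VMDK is already in place.
    return bool(getattr(task_info.result, 'file', []))
```

Serializing this lookup behind the per-image lock (the "Acquired lock / Acquired external semaphore" lines for devstack-image-cache_base above) keeps two concurrent builds from both concluding the image is missing and fetching it twice.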
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.133544] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 158259a4-f54a-4192-b235-f03838193516] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1730.134192] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.247351] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.006s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.250020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.536s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.251204] env[63371]: INFO nova.compute.claims [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1730.263470] env[63371]: DEBUG oslo_vmware.api [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Task: {'id': task-1774619, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.452802} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.264394] env[63371]: INFO nova.scheduler.client.report [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Deleted allocations for instance 3da99cec-409f-4ea0-891c-2e9d7429674d [ 1730.265351] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1730.265970] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1730.265970] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1730.265970] env[63371]: INFO nova.compute.manager [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1730.266121] env[63371]: DEBUG oslo.service.loopingcall [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1730.269607] env[63371]: DEBUG nova.compute.manager [-] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1730.269607] env[63371]: DEBUG nova.network.neutron [-] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1730.472822] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774617, 'name': ReconfigVM_Task, 'duration_secs': 0.909521} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.473062] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 11527051-7a4f-481a-b5ed-14550c550c4e/11527051-7a4f-481a-b5ed-14550c550c4e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1730.473817] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f95abe31-914b-4e5e-b196-65c4d94f8b2f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.481637] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1730.481637] env[63371]: value = "task-1774621" [ 1730.481637] env[63371]: _type = "Task" [ 1730.481637] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.495545] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774621, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.532370] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774620, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.582778] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]526230b5-2fab-0e7f-b239-98ffd5c2a70f, 'name': SearchDatastore_Task, 'duration_secs': 0.01637} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.583245] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.583980] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1730.583980] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.584100] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.584268] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1730.584604] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1730.584916] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1730.585159] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ef12400-9c48-4e86-9fe2-6bc0ac81f4c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.587171] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f4a0eccc-910a-4591-8a30-b87838a3f30e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.593756] 
env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1730.593756] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]529c770d-f40c-fd9f-75f7-331561871b43" [ 1730.593756] env[63371]: _type = "Task" [ 1730.593756] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.599092] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1730.599445] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1730.600536] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b590c877-ef9d-4e8e-8224-96c6070bea94 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.621060] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529c770d-f40c-fd9f-75f7-331561871b43, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.622665] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1730.622974] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1730.623260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1730.623679] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1730.623679] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5233277e-e4ed-2b72-6a93-d8cd0e12fba4" [ 1730.623679] env[63371]: _type = "Task" [ 1730.623679] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.633172] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5233277e-e4ed-2b72-6a93-d8cd0e12fba4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.775284] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1d509bb7-3802-4357-8202-97296abe1d21 tempest-ServerShowV257Test-2127604469 tempest-ServerShowV257Test-2127604469-project-member] Lock "3da99cec-409f-4ea0-891c-2e9d7429674d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.124s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.912012] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c394e6f4-fa53-4372-aa16-4615023d1fd0 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.784s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.992572] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774621, 'name': Rename_Task, 'duration_secs': 0.304708} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.992915] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1730.993238] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9cd692e9-215a-42f9-87dd-22894a1c07ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.000716] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1731.000716] env[63371]: value = "task-1774622" [ 1731.000716] env[63371]: _type = "Task" [ 1731.000716] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.008834] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774622, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.037048] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774620, 'name': ReconfigVM_Task, 'duration_secs': 0.795813} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.037048] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Reconfigured VM instance instance-0000005c to attach disk [datastore1] f391d4f3-6e9d-4ddc-918a-8dc8581dfc00/f391d4f3-6e9d-4ddc-918a-8dc8581dfc00.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1731.037048] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54d9f9f8-0d96-4f05-90ff-2f6adda27e07 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.043041] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1731.043041] env[63371]: value = "task-1774623" [ 1731.043041] env[63371]: _type = "Task" [ 1731.043041] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.055019] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774623, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.137405] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5233277e-e4ed-2b72-6a93-d8cd0e12fba4, 'name': SearchDatastore_Task, 'duration_secs': 0.017688} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.138683] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37e7ce41-6052-4379-9df2-00ef92500023 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.151241] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1731.151241] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bcc626-f98b-b2ac-9f39-9c262468a381" [ 1731.151241] env[63371]: _type = "Task" [ 1731.151241] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.151909] env[63371]: DEBUG nova.compute.manager [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Stashing vm_state: stopped {{(pid=63371) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1731.172443] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bcc626-f98b-b2ac-9f39-9c262468a381, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.305141] env[63371]: DEBUG nova.network.neutron [-] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1731.515214] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774622, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.554967] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774623, 'name': Rename_Task, 'duration_secs': 0.303368} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.557314] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1731.557721] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8eda3618-d8f2-42d2-aa7e-eec995913feb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.566181] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1731.566181] env[63371]: value = "task-1774624" [ 1731.566181] env[63371]: _type = "Task" [ 1731.566181] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.576836] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774624, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.614906] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e7d4c0-0433-490c-8fac-0ac1edc5d3aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.622960] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e95652d-1c04-4f93-9cfd-e0f8c7e85be0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.656281] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b35750-d472-440b-90f2-b72e2779b520 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.673922] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f948bbc1-7506-4a11-9ab5-47fea65512ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.677850] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bcc626-f98b-b2ac-9f39-9c262468a381, 'name': SearchDatastore_Task, 'duration_secs': 0.0474} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.679123] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.679123] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1731.679405] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7349ecf6-2de7-4540-b713-7e29cbd3ff0b/7349ecf6-2de7-4540-b713-7e29cbd3ff0b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1731.680086] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.680543] env[63371]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1731.680543] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-171d7adb-347c-4011-8308-f10500c4e14a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.691681] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb116ce7-4231-4fdf-a852-db78ffd69c35 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.696018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.696018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.696018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.696018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.696018] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.696250] env[63371]: DEBUG nova.compute.provider_tree [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1731.697760] env[63371]: INFO nova.compute.manager [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Terminating instance [ 1731.699920] env[63371]: DEBUG nova.compute.manager [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1731.700125] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1731.701615] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb5b4a5-095d-4964-a4e3-f1223c673676 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.708021] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1731.708021] env[63371]: value = "task-1774625" [ 1731.708021] env[63371]: _type = "Task" [ 1731.708021] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.713634] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1731.713912] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1731.714084] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1731.715200] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-84b128df-1ba1-4444-a22a-4e82079a24f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.716948] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53179cc5-1772-41a7-9cbe-21bf170126ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.722208] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774625, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.727638] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1731.727638] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52486406-a86c-fb04-f702-f6f6b9e3cc96" [ 1731.727638] env[63371]: _type = "Task" [ 1731.727638] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.732254] env[63371]: DEBUG oslo_vmware.api [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1731.732254] env[63371]: value = "task-1774626" [ 1731.732254] env[63371]: _type = "Task" [ 1731.732254] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.740429] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52486406-a86c-fb04-f702-f6f6b9e3cc96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.746104] env[63371]: DEBUG oslo_vmware.api [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774626, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.806099] env[63371]: INFO nova.compute.manager [-] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Took 1.54 seconds to deallocate network for instance. 
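Editor's note: the "Creating directory with path [datastore1] devstack-image-cache_base" / "Folder ... created" lines above correspond to a FileManager.MakeDirectory call that tolerates the folder already existing. A minimal sketch under that assumption; dc_ref is a placeholder for the datacenter reference Nova resolves at runtime:

```python
# Minimal sketch of the "Creating directory ... / Folder ... created" step:
# a FileManager.MakeDirectory call that tolerates the folder already being
# there. dc_ref is a placeholder for the datacenter managed-object reference.
from oslo_vmware import exceptions as vexc

def ensure_cache_folder(session, dc_ref,
                        ds_name='datastore1',
                        folder='devstack-image-cache_base'):
    file_manager = session.vim.service_content.fileManager
    try:
        session.invoke_api(session.vim, 'MakeDirectory', file_manager,
                           name='[%s] %s' % (ds_name, folder),
                           datacenter=dc_ref,
                           createParentDirectories=True)
    except vexc.FileAlreadyExistsException:
        pass  # another build already created it; nothing to do
```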
[ 1731.923938] env[63371]: DEBUG nova.compute.manager [req-3c6e619e-d73c-4c0a-afb3-c31f6c0cacaa req-8c916af6-361c-437d-838f-d5f5162c9dc0 service nova] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Received event network-vif-deleted-18a0dce6-a0d5-44e7-85a3-d54e70aa89b3 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1732.014128] env[63371]: DEBUG oslo_vmware.api [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774622, 'name': PowerOnVM_Task, 'duration_secs': 0.838274} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.014459] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1732.014629] env[63371]: INFO nova.compute.manager [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Took 12.40 seconds to spawn the instance on the hypervisor. [ 1732.014845] env[63371]: DEBUG nova.compute.manager [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1732.015695] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b113d0-1cb3-4265-a1b6-541e52bbd0d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.081109] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774624, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.201180] env[63371]: DEBUG nova.scheduler.client.report [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1732.217990] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774625, 'name': CopyVirtualDisk_Task} progress is 89%. 
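Editor's note: task-1774625 above is a VirtualDiskManager.CopyVirtualDisk_Task cloning the cached image VMDK into the new instance's folder; the "progress is N%" lines are the same polling loop reporting on it. A hedged sketch of that call, with dc_ref and both datastore paths as placeholders:

```python
# Hedged sketch of the CopyVirtualDisk_Task call logged above: cloning the
# cached image VMDK into the new instance's directory and waiting for the
# task to finish. dc_ref and both datastore paths are placeholders.
def copy_cached_image(session, dc_ref, image_id, instance_uuid):
    disk_mgr = session.vim.service_content.virtualDiskManager
    src = ('[datastore1] devstack-image-cache_base/%s/%s.vmdk'
           % (image_id, image_id))
    dst = '[datastore1] %s/%s.vmdk' % (instance_uuid, instance_uuid)
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src, sourceDatacenter=dc_ref,
                              destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(task)  # the "progress is N%" lines come from here
    return dst
```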
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.240899] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52486406-a86c-fb04-f702-f6f6b9e3cc96, 'name': SearchDatastore_Task, 'duration_secs': 0.010833} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.242169] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dee08e5b-d4ce-4705-83ad-ce9acc4ac18d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.249582] env[63371]: DEBUG oslo_vmware.api [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774626, 'name': PowerOffVM_Task, 'duration_secs': 0.266816} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.250105] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1732.250315] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1732.250523] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13ff3dea-1ae3-4f53-9542-bf00be9baf9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.253805] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1732.253805] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52721b70-5a23-0f8f-3f57-82fa2709a8a8" [ 1732.253805] env[63371]: _type = "Task" [ 1732.253805] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.262206] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52721b70-5a23-0f8f-3f57-82fa2709a8a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.314404] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.397420] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1732.397845] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1732.397845] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Deleting the datastore file [datastore1] 44cc8606-24f5-4f6b-b96f-3559c9c3f06e {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1732.398158] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b46372fe-4472-4169-9bad-b2897806cab9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.405217] env[63371]: DEBUG oslo_vmware.api [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1732.405217] env[63371]: value = "task-1774628" [ 1732.405217] env[63371]: _type = "Task" [ 1732.405217] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.414287] env[63371]: DEBUG oslo_vmware.api [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774628, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.537754] env[63371]: INFO nova.compute.manager [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Took 29.27 seconds to build instance. [ 1732.581068] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774624, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.707060] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.457s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.707309] env[63371]: DEBUG nova.compute.manager [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1732.710037] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.272s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.710267] env[63371]: DEBUG nova.objects.instance [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Lazy-loading 'resources' on Instance uuid 6f31d6ad-480d-40dd-924e-f6277d93c99a {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1732.722734] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774625, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529562} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.723415] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 7349ecf6-2de7-4540-b713-7e29cbd3ff0b/7349ecf6-2de7-4540-b713-7e29cbd3ff0b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1732.723415] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1732.723695] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c2475888-3d43-48ef-8a6a-65c77de6cba5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.731923] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1732.731923] env[63371]: value = "task-1774629" [ 1732.731923] env[63371]: _type = "Task" [ 1732.731923] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.741899] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774629, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.765287] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52721b70-5a23-0f8f-3f57-82fa2709a8a8, 'name': SearchDatastore_Task, 'duration_secs': 0.071425} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.765560] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1732.765833] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 158259a4-f54a-4192-b235-f03838193516/158259a4-f54a-4192-b235-f03838193516.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1732.766128] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-684f5f90-45b4-40ab-bb0b-d7277ae61dba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.774805] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1732.774805] env[63371]: value = "task-1774630" [ 1732.774805] env[63371]: _type = "Task" [ 1732.774805] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.784757] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774630, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.916175] env[63371]: DEBUG oslo_vmware.api [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774628, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.45329} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.916460] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1732.916649] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1732.916822] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1732.916995] env[63371]: INFO nova.compute.manager [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1732.917284] env[63371]: DEBUG oslo.service.loopingcall [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1732.917502] env[63371]: DEBUG nova.compute.manager [-] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1732.917577] env[63371]: DEBUG nova.network.neutron [-] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1733.040608] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dbc36b09-cfcf-4008-b41b-ea8c6b3ae6bf tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "11527051-7a4f-481a-b5ed-14550c550c4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.781s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.083274] env[63371]: DEBUG oslo_vmware.api [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774624, 'name': PowerOnVM_Task, 'duration_secs': 1.402144} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.084617] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1733.084907] env[63371]: INFO nova.compute.manager [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Took 10.87 seconds to spawn the instance on the hypervisor. [ 1733.085149] env[63371]: DEBUG nova.compute.manager [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1733.086441] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93dea37b-dc5b-4d0b-bf44-6b846aa8a375 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.216526] env[63371]: DEBUG nova.compute.utils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1733.219349] env[63371]: DEBUG nova.compute.manager [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1733.219664] env[63371]: DEBUG nova.network.neutron [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1733.248502] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.096446} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.249469] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1733.253027] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc3c04b-59a1-43b9-aa9c-08dd8c4e31b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.277867] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 7349ecf6-2de7-4540-b713-7e29cbd3ff0b/7349ecf6-2de7-4540-b713-7e29cbd3ff0b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1733.281451] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e57240cf-f3d5-417e-be92-96a3390e2fae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.307702] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774630, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.312074] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1733.312074] env[63371]: value = "task-1774631" [ 1733.312074] env[63371]: _type = "Task" [ 1733.312074] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.324298] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774631, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.330992] env[63371]: DEBUG nova.policy [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c6416719728485f8dd45eea9e39fdc5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58f967d3770541269fb89f48b3df58c9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1733.569513] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.570043] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.570299] env[63371]: INFO nova.compute.manager [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Rebooting instance [ 1733.613659] env[63371]: INFO nova.compute.manager [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Took 29.92 seconds to build instance. [ 1733.678686] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e2406d-a2e0-4166-ae46-96728ef01afb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.687237] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9020294c-570a-47fb-9fa4-ac7c8a4b59d5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.728023] env[63371]: DEBUG nova.network.neutron [-] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.728023] env[63371]: DEBUG nova.compute.manager [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1733.728913] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa087e21-683a-4f80-8a4d-51fd09325592 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.738876] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c44d248f-1ef1-4269-a68c-86346aa126a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.754598] env[63371]: DEBUG nova.compute.provider_tree [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1733.792989] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774630, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.653533} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.793214] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 158259a4-f54a-4192-b235-f03838193516/158259a4-f54a-4192-b235-f03838193516.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1733.793474] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1733.793748] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e01e4aa-b2c7-412e-8d27-dfb3f12efdff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.803798] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1733.803798] env[63371]: value = "task-1774632" [ 1733.803798] env[63371]: _type = "Task" [ 1733.803798] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.810058] env[63371]: DEBUG nova.network.neutron [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Successfully created port: 2860b658-ff36-48a0-b36c-81ae2f4a6c16 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1733.816166] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774632, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.825275] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774631, 'name': ReconfigVM_Task, 'duration_secs': 0.398202} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.825625] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 7349ecf6-2de7-4540-b713-7e29cbd3ff0b/7349ecf6-2de7-4540-b713-7e29cbd3ff0b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1733.826299] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-770d8d0c-1983-4e6e-9a4f-d11a0cd98042 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.833061] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1733.833061] env[63371]: value = "task-1774633" [ 1733.833061] env[63371]: _type = "Task" [ 1733.833061] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.843476] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774633, 'name': Rename_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.008258] env[63371]: DEBUG nova.compute.manager [req-2b253e3d-11cb-4b37-ba5b-c4314cb33cc3 req-861864ef-610a-493b-9599-9ff1291b540b service nova] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Received event network-vif-deleted-b03ddfde-3b36-43a8-8c6a-00cd704bce22 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1734.090068] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.090271] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.090589] env[63371]: DEBUG nova.network.neutron [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1734.116281] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3e46be43-948e-4808-8bb2-eef7afd8fe80 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "f391d4f3-6e9d-4ddc-918a-8dc8581dfc00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.432s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.237347] env[63371]: INFO nova.compute.manager [-] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Took 1.32 seconds to deallocate network for instance. [ 1734.258171] env[63371]: DEBUG nova.scheduler.client.report [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1734.314824] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774632, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081254} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.315049] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1734.316583] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25730b49-dcab-4ea8-8e59-f18eb18d8d8d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.339445] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 158259a4-f54a-4192-b235-f03838193516/158259a4-f54a-4192-b235-f03838193516.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1734.339800] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c1a4af3-37e1-45fe-938e-8310b0f0c46b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.364972] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774633, 'name': Rename_Task, 'duration_secs': 0.176598} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.365995] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1734.366332] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1734.366332] env[63371]: value = "task-1774634" [ 1734.366332] env[63371]: _type = "Task" [ 1734.366332] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.366521] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98aca0dc-7c6d-41bb-b6a9-9f23a13a8748 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.376294] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774634, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.377391] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1734.377391] env[63371]: value = "task-1774635" [ 1734.377391] env[63371]: _type = "Task" [ 1734.377391] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.386045] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774635, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.743253] env[63371]: DEBUG nova.compute.manager [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1734.744748] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.764192] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.054s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.766363] env[63371]: DEBUG oslo_concurrency.lockutils [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.701s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.766599] env[63371]: DEBUG nova.objects.instance [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lazy-loading 'resources' on Instance uuid 382a5997-90bb-4bbc-b595-23c8d2f2e1f0 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1734.778811] env[63371]: DEBUG nova.virt.hardware [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1734.779729] env[63371]: DEBUG nova.virt.hardware [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1734.779729] env[63371]: DEBUG nova.virt.hardware [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1734.779729] env[63371]: DEBUG nova.virt.hardware [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1734.779729] env[63371]: DEBUG nova.virt.hardware [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1734.779929] env[63371]: DEBUG nova.virt.hardware [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1734.780091] env[63371]: DEBUG nova.virt.hardware [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1734.780329] env[63371]: DEBUG nova.virt.hardware [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1734.780527] env[63371]: DEBUG nova.virt.hardware [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1734.780694] env[63371]: DEBUG nova.virt.hardware [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1734.780879] env[63371]: DEBUG nova.virt.hardware [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 
tempest-ServersTestJSON-1162814863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1734.782104] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3c8113-b455-46bc-9c62-9091b61da08e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.793245] env[63371]: INFO nova.scheduler.client.report [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Deleted allocations for instance 6f31d6ad-480d-40dd-924e-f6277d93c99a [ 1734.795234] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e394bb6-eea1-4be2-987c-e4276aa44e3a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.864851] env[63371]: DEBUG nova.network.neutron [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [{"id": "01b878e5-651e-49f1-959f-7da17291c0bc", "address": "fa:16:3e:b7:c4:0c", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b878e5-65", "ovs_interfaceid": "01b878e5-651e-49f1-959f-7da17291c0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.879272] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774634, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.889410] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774635, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.305178] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ae9e9db-9067-46ff-81c7-4652663c3a82 tempest-ServersAaction247Test-1359088529 tempest-ServersAaction247Test-1359088529-project-member] Lock "6f31d6ad-480d-40dd-924e-f6277d93c99a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.742s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.367806] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.375014] env[63371]: DEBUG nova.compute.manager [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1735.375014] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c52756c-a893-46bb-ad62-35c3f41b45ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.387112] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774634, 'name': ReconfigVM_Task, 'duration_secs': 0.586779} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.390427] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 158259a4-f54a-4192-b235-f03838193516/158259a4-f54a-4192-b235-f03838193516.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1735.391726] env[63371]: DEBUG nova.compute.manager [req-758686db-469a-4ee7-924b-fc9824a0c4dc req-6eb4ccbd-2d18-4c87-89da-f016b8549f36 service nova] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Received event network-vif-plugged-2860b658-ff36-48a0-b36c-81ae2f4a6c16 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1735.391920] env[63371]: DEBUG oslo_concurrency.lockutils [req-758686db-469a-4ee7-924b-fc9824a0c4dc req-6eb4ccbd-2d18-4c87-89da-f016b8549f36 service nova] Acquiring lock "485a2d6a-1b58-470d-9dc5-8cf31b6726ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.392132] env[63371]: DEBUG oslo_concurrency.lockutils [req-758686db-469a-4ee7-924b-fc9824a0c4dc req-6eb4ccbd-2d18-4c87-89da-f016b8549f36 service nova] Lock "485a2d6a-1b58-470d-9dc5-8cf31b6726ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.392512] env[63371]: DEBUG oslo_concurrency.lockutils [req-758686db-469a-4ee7-924b-fc9824a0c4dc req-6eb4ccbd-2d18-4c87-89da-f016b8549f36 service nova] Lock "485a2d6a-1b58-470d-9dc5-8cf31b6726ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.392512] env[63371]: DEBUG nova.compute.manager [req-758686db-469a-4ee7-924b-fc9824a0c4dc req-6eb4ccbd-2d18-4c87-89da-f016b8549f36 service nova] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] No waiting events found dispatching network-vif-plugged-2860b658-ff36-48a0-b36c-81ae2f4a6c16 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1735.392787] env[63371]: WARNING nova.compute.manager [req-758686db-469a-4ee7-924b-fc9824a0c4dc req-6eb4ccbd-2d18-4c87-89da-f016b8549f36 service nova] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Received unexpected event network-vif-plugged-2860b658-ff36-48a0-b36c-81ae2f4a6c16 for instance with vm_state building and task_state spawning. [ 1735.396310] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3372761d-1855-47c4-a861-8e41e4e94305 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.403475] env[63371]: DEBUG oslo_vmware.api [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774635, 'name': PowerOnVM_Task, 'duration_secs': 0.724921} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.403749] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1735.403973] env[63371]: INFO nova.compute.manager [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Took 10.68 seconds to spawn the instance on the hypervisor. [ 1735.404171] env[63371]: DEBUG nova.compute.manager [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1735.404960] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f150601b-face-49c7-8fc7-5fc249a1bda7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.410863] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1735.410863] env[63371]: value = "task-1774636" [ 1735.410863] env[63371]: _type = "Task" [ 1735.410863] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.430706] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774636, 'name': Rename_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.619213] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-769b7f4c-3489-47b4-8985-052af4b9ac81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.628193] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec90b301-50d8-40ef-92c1-c31a2fb56322 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.660360] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-950be2b8-11fc-4e81-bcb8-0dc8798e117c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.669126] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4353e0-51e6-4a65-a95a-a2b3703c520e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.683621] env[63371]: DEBUG nova.compute.provider_tree [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1735.927612] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774636, 'name': Rename_Task, 'duration_secs': 0.224604} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.927915] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1735.928191] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-effeaf07-f0a5-4625-bd12-aadf79f2e2a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.934066] env[63371]: INFO nova.compute.manager [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Took 31.88 seconds to build instance. [ 1735.938031] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1735.938031] env[63371]: value = "task-1774637" [ 1735.938031] env[63371]: _type = "Task" [ 1735.938031] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.952933] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774637, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.971568] env[63371]: DEBUG nova.network.neutron [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Successfully updated port: 2860b658-ff36-48a0-b36c-81ae2f4a6c16 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1736.086470] env[63371]: DEBUG nova.compute.manager [req-18b654ba-e517-419c-baa4-3fa2aa0fa744 req-43a77270-8a20-4ced-857e-75173abdf203 service nova] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Received event network-changed-2860b658-ff36-48a0-b36c-81ae2f4a6c16 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1736.086985] env[63371]: DEBUG nova.compute.manager [req-18b654ba-e517-419c-baa4-3fa2aa0fa744 req-43a77270-8a20-4ced-857e-75173abdf203 service nova] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Refreshing instance network info cache due to event network-changed-2860b658-ff36-48a0-b36c-81ae2f4a6c16. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1736.086985] env[63371]: DEBUG oslo_concurrency.lockutils [req-18b654ba-e517-419c-baa4-3fa2aa0fa744 req-43a77270-8a20-4ced-857e-75173abdf203 service nova] Acquiring lock "refresh_cache-485a2d6a-1b58-470d-9dc5-8cf31b6726ef" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.087087] env[63371]: DEBUG oslo_concurrency.lockutils [req-18b654ba-e517-419c-baa4-3fa2aa0fa744 req-43a77270-8a20-4ced-857e-75173abdf203 service nova] Acquired lock "refresh_cache-485a2d6a-1b58-470d-9dc5-8cf31b6726ef" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.087302] env[63371]: DEBUG nova.network.neutron [req-18b654ba-e517-419c-baa4-3fa2aa0fa744 req-43a77270-8a20-4ced-857e-75173abdf203 service nova] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Refreshing network info cache for port 2860b658-ff36-48a0-b36c-81ae2f4a6c16 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1736.191450] env[63371]: DEBUG nova.scheduler.client.report [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1736.404092] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4646e0fb-eb78-4c09-9e6c-86db4a3cafc1 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.415077] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Doing hard reboot of VM {{(pid=63371) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1736.415458] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-66b8d9d2-9aa5-473f-9d5f-d94546c1873d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.423740] env[63371]: DEBUG oslo_vmware.api [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1736.423740] env[63371]: value = "task-1774638" [ 1736.423740] env[63371]: _type = "Task" [ 1736.423740] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.434357] env[63371]: DEBUG oslo_vmware.api [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774638, 'name': ResetVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.435931] env[63371]: DEBUG oslo_concurrency.lockutils [None req-51da4838-bdd7-4055-ad8c-8bef1549872e tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "7349ecf6-2de7-4540-b713-7e29cbd3ff0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.401s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.450549] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774637, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.474538] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "refresh_cache-485a2d6a-1b58-470d-9dc5-8cf31b6726ef" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.624649] env[63371]: DEBUG nova.network.neutron [req-18b654ba-e517-419c-baa4-3fa2aa0fa744 req-43a77270-8a20-4ced-857e-75173abdf203 service nova] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Instance cache missing network info. 
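The Acquiring/Acquired/Releasing lines around "refresh_cache-485a2d6a-1b58-470d-9dc5-8cf31b6726ef" come from oslo.concurrency's lockutils and serialise the cache refresh triggered by the port event against the build thread that also wants that cache. A minimal sketch of taking such a per-instance named lock with the library's context manager follows; the surrounding function and its body are illustrative, not Nova's code, only the lock-name pattern is taken from the log.

from oslo_concurrency import lockutils

INSTANCE_UUID = "485a2d6a-1b58-470d-9dc5-8cf31b6726ef"  # from the log above

def refresh_network_cache(instance_uuid=INSTANCE_UUID):
    # Serialise per-instance cache refreshes, mirroring the
    # "refresh_cache-<uuid>" lock names seen in the log.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        # ... query Neutron and update the instance_info_cache ...
        pass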
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1736.696294] env[63371]: DEBUG oslo_concurrency.lockutils [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.930s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1736.699645] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 16.433s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.726077] env[63371]: INFO nova.scheduler.client.report [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted allocations for instance 382a5997-90bb-4bbc-b595-23c8d2f2e1f0 [ 1736.783311] env[63371]: DEBUG nova.network.neutron [req-18b654ba-e517-419c-baa4-3fa2aa0fa744 req-43a77270-8a20-4ced-857e-75173abdf203 service nova] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.935697] env[63371]: DEBUG oslo_vmware.api [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774638, 'name': ResetVM_Task, 'duration_secs': 0.111969} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.935996] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Did hard reboot of VM {{(pid=63371) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1736.936191] env[63371]: DEBUG nova.compute.manager [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1736.936991] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81aec8f-2f65-4bc7-b254-40cd906beceb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.953361] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774637, 'name': PowerOnVM_Task, 'duration_secs': 0.978443} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.953646] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1736.953882] env[63371]: DEBUG nova.compute.manager [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1736.954823] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d2dfa1-9c79-4d3c-a3f3-90624ad824cf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.234661] env[63371]: DEBUG oslo_concurrency.lockutils [None req-18d4be13-d63a-4b3f-bedc-730cd3298559 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "382a5997-90bb-4bbc-b595-23c8d2f2e1f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.265s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.287297] env[63371]: DEBUG oslo_concurrency.lockutils [req-18b654ba-e517-419c-baa4-3fa2aa0fa744 req-43a77270-8a20-4ced-857e-75173abdf203 service nova] Releasing lock "refresh_cache-485a2d6a-1b58-470d-9dc5-8cf31b6726ef" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.287297] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "refresh_cache-485a2d6a-1b58-470d-9dc5-8cf31b6726ef" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.287297] env[63371]: DEBUG nova.network.neutron [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1737.456954] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f5b6ea8-ef36-4979-8382-57b51f7da5bf tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.887s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.468916] env[63371]: INFO nova.compute.manager [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] bringing vm to original state: 'stopped' [ 1737.718147] env[63371]: INFO nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating resource usage from 
migration 1bad7cd8-a319-4561-8560-524cc376e5e2 [ 1737.718147] env[63371]: INFO nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating resource usage from migration c0049d9e-3f16-4dab-89a1-5e74800f317c [ 1737.749127] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 44cc8606-24f5-4f6b-b96f-3559c9c3f06e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1737.749406] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance b523486c-adae-4322-80be-1f3bf33ca192 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1737.749566] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1737.749719] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 6df9af10-0053-4696-920a-10ab2af67ef5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1737.749857] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance d00602b9-16bf-4c11-bc47-6076dddbf159 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1737.749981] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance c03e2dc4-75d9-4fbb-afc8-046cbbf908ac actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1737.750376] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 0c9156ea-81c4-4286-a20b-66068a5bce59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1737.750376] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
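The WARNING/DEBUG pairs above show the resource tracker's heal pass sorting placement allocations into instances this host actively manages and allocations that merely reference this host. A rough sketch of that classification is below; the function and the sample call are illustrative, with the resource values copied from the log.

def classify_allocations(allocations, managed_uuids):
    # allocations: {instance_uuid: resources dict}
    # managed_uuids: set of uuids this compute host actively manages
    for uuid, resources in allocations.items():
        if uuid in managed_uuids:
            print(f"Instance {uuid} actively managed on this compute host "
                  f"and has allocations in placement: {resources}.")
        else:
            print(f"Instance {uuid} is not being actively managed by this "
                  f"compute host but has allocations referencing this "
                  f"compute host: {resources}. Skipping heal of allocation.")

classify_allocations(
    {"88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1},
     "935cf583-ecde-4a10-a773-6ff765e5bb49": {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}},
    managed_uuids={"88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec"},
)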
[ 1737.750499] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance c04edf6d-8a07-4776-be0f-b763fb3059d2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1737.750593] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance cf63c2a2-ee72-464e-944d-5e53ca8635ac actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1737.750727] env[63371]: WARNING nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 935cf583-ecde-4a10-a773-6ff765e5bb49 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1737.750906] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 158259a4-f54a-4192-b235-f03838193516 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1737.751527] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 11527051-7a4f-481a-b5ed-14550c550c4e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1737.751760] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance f391d4f3-6e9d-4ddc-918a-8dc8581dfc00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1737.752017] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 7349ecf6-2de7-4540-b713-7e29cbd3ff0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1737.829255] env[63371]: DEBUG nova.network.neutron [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1738.033337] env[63371]: DEBUG nova.network.neutron [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Updating instance_info_cache with network_info: [{"id": "2860b658-ff36-48a0-b36c-81ae2f4a6c16", "address": "fa:16:3e:9b:59:00", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2860b658-ff", "ovs_interfaceid": "2860b658-ff36-48a0-b36c-81ae2f4a6c16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1738.258701] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 9985dbcd-4498-4629-aae5-5e1933307c50 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1738.258701] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 485a2d6a-1b58-470d-9dc5-8cf31b6726ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
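The instance_info_cache update above carries the full network_info for port 2860b658-ff36-48a0-b36c-81ae2f4a6c16. The sketch below pulls the port id, MAC and fixed IP out of a trimmed copy of that structure; the field names are exactly those in the log, the loop itself is only illustrative.

network_info = [{
    "id": "2860b658-ff36-48a0-b36c-81ae2f4a6c16",
    "address": "fa:16:3e:9b:59:00",
    "devname": "tap2860b658-ff",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.4", "type": "fixed"}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], fixed_ips)   # port id, MAC, ['192.168.128.4']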
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1738.480445] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "158259a4-f54a-4192-b235-f03838193516" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.480814] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "158259a4-f54a-4192-b235-f03838193516" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.481034] env[63371]: DEBUG nova.compute.manager [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1738.481993] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd0420d0-9202-401f-9b28-d3823dfd0d94 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.490111] env[63371]: DEBUG nova.compute.manager [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63371) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1738.492427] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1738.492680] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22800952-b3ca-4d4f-9cb2-c0bc15f3ba95 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.499680] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1738.499680] env[63371]: value = "task-1774639" [ 1738.499680] env[63371]: _type = "Task" [ 1738.499680] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.509793] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774639, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.536198] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "refresh_cache-485a2d6a-1b58-470d-9dc5-8cf31b6726ef" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.536505] env[63371]: DEBUG nova.compute.manager [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Instance network_info: |[{"id": "2860b658-ff36-48a0-b36c-81ae2f4a6c16", "address": "fa:16:3e:9b:59:00", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2860b658-ff", "ovs_interfaceid": "2860b658-ff36-48a0-b36c-81ae2f4a6c16", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1738.536956] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:59:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2860b658-ff36-48a0-b36c-81ae2f4a6c16', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1738.545121] env[63371]: DEBUG oslo.service.loopingcall [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
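The "Instance VIF info" entry above is derived from that same network_info: bridge, MAC, port id and the NSX logical-switch id reappear as network_name, mac_address, iface_id and network_ref. A minimal sketch of that mapping for an OpaqueNetwork/vmxnet3 port follows; the helper name and the trimmed input dict are mine, the output keys are copied from the log.

def build_vif_info(vif, vif_model="vmxnet3"):
    # Map one network_info entry to the VIF info dict shape logged above.
    return {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": vif["details"]["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }

example_vif = {
    "id": "2860b658-ff36-48a0-b36c-81ae2f4a6c16",
    "address": "fa:16:3e:9b:59:00",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a"},
}
print(build_vif_info(example_vif))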
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1738.545434] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1738.545603] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-292ba400-af2c-4b59-b8e0-ed29de424f51 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.567532] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1738.567532] env[63371]: value = "task-1774640" [ 1738.567532] env[63371]: _type = "Task" [ 1738.567532] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.578979] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774640, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.587641] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "0c8c6997-bec8-4a3b-80cf-cbf35f3843f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.587641] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "0c8c6997-bec8-4a3b-80cf-cbf35f3843f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.763416] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e16e4a55-4198-4308-b12c-d9ac07daecad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1738.763416] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Migration 1bad7cd8-a319-4561-8560-524cc376e5e2 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1738.763416] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 3a6c12a7-732f-4a73-a8c5-6810b554cc03 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1738.763416] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Migration c0049d9e-3f16-4dab-89a1-5e74800f317c is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1738.763416] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 9862b0f0-ccf6-4e69-9e78-cf864adaa65e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1738.763416] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 14 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1738.763416] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3264MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=14 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1739.016481] env[63371]: DEBUG oslo_vmware.api [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774639, 'name': PowerOffVM_Task, 'duration_secs': 0.373071} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.016808] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1739.016993] env[63371]: DEBUG nova.compute.manager [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1739.017863] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd586495-e906-4b4e-ac52-d8f1aa30b8d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.085211] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774640, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.089026] env[63371]: DEBUG nova.compute.manager [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1739.132923] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae82389-ac34-466a-8428-48c926e6d037 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.141804] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df28970d-5c17-4ff1-ae51-31b0b3d9e06c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.175346] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a525d600-31d0-4276-bbb0-dfd4b8227176 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.187360] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5deb5c1-44b0-4909-aec6-a2556bd0bbc2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.202465] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1739.468564] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e456c420-8bc4-425a-928f-43fadcc8fdff tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "11527051-7a4f-481a-b5ed-14550c550c4e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.468848] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e456c420-8bc4-425a-928f-43fadcc8fdff tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "11527051-7a4f-481a-b5ed-14550c550c4e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.469206] env[63371]: DEBUG nova.compute.manager [None req-e456c420-8bc4-425a-928f-43fadcc8fdff tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1739.470549] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d822df-edad-4fe7-8ed4-5495e5248ec0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.478318] env[63371]: DEBUG nova.compute.manager [None req-e456c420-8bc4-425a-928f-43fadcc8fdff tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63371) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1739.479068] env[63371]: DEBUG nova.objects.instance [None 
req-e456c420-8bc4-425a-928f-43fadcc8fdff tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lazy-loading 'flavor' on Instance uuid 11527051-7a4f-481a-b5ed-14550c550c4e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1739.534923] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "158259a4-f54a-4192-b235-f03838193516" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.054s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.583903] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774640, 'name': CreateVM_Task, 'duration_secs': 0.991717} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.584138] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1739.584858] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1739.585019] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1739.586314] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1739.586314] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49be858e-f672-4d62-a6da-0e2d6e014f4b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.591423] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1739.591423] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fbab6b-8e38-ee6c-0277-a0c31ac319a4" [ 1739.591423] env[63371]: _type = "Task" [ 1739.591423] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.604383] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fbab6b-8e38-ee6c-0277-a0c31ac319a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.614119] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.706755] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1739.985708] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e456c420-8bc4-425a-928f-43fadcc8fdff tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1739.986043] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8d0faee-06b9-4e89-80d6-23888f0b8373 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.998172] env[63371]: DEBUG oslo_vmware.api [None req-e456c420-8bc4-425a-928f-43fadcc8fdff tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1739.998172] env[63371]: value = "task-1774641" [ 1739.998172] env[63371]: _type = "Task" [ 1739.998172] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.013779] env[63371]: DEBUG oslo_vmware.api [None req-e456c420-8bc4-425a-928f-43fadcc8fdff tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774641, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.045227] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.103244] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fbab6b-8e38-ee6c-0277-a0c31ac319a4, 'name': SearchDatastore_Task, 'duration_secs': 0.044157} completed successfully. 
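The inventory reported above for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 fixes its schedulable capacity. Under the usual placement convention, capacity per resource class is (total - reserved) * allocation_ratio; that formula is not stated in the log, but applying it to the logged values gives 192 VCPU, 196078 MB of RAM and 400 GB of disk, as the sketch below computes.

inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity {capacity:g}")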
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.103782] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.103863] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1740.104085] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.104866] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.104866] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1740.104866] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b4c1ba43-2e79-4b54-b039-b623a9a88bd1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.115183] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1740.115225] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1740.115961] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4eb607b6-265a-48ba-9831-561122ec044c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.122986] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1740.122986] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521e91cb-b85d-21c3-8be0-c63e42c3959f" [ 1740.122986] env[63371]: _type = "Task" [ 1740.122986] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.132936] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521e91cb-b85d-21c3-8be0-c63e42c3959f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.214215] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1740.214507] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.516s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.214834] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.725s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.216358] env[63371]: INFO nova.compute.claims [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1740.220443] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1740.220592] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 1740.510109] env[63371]: DEBUG oslo_vmware.api [None req-e456c420-8bc4-425a-928f-43fadcc8fdff tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774641, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.628901] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Acquiring lock "943e2506-03a4-4633-b55b-381d9d8d9ef6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.629209] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Lock "943e2506-03a4-4633-b55b-381d9d8d9ef6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.645157] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521e91cb-b85d-21c3-8be0-c63e42c3959f, 'name': SearchDatastore_Task, 'duration_secs': 0.018251} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.646502] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5c1199d-a698-4972-81cb-28459c9267cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.656948] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1740.656948] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52914a69-149d-1fbc-2474-3ae84a88e08b" [ 1740.656948] env[63371]: _type = "Task" [ 1740.656948] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.669097] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52914a69-149d-1fbc-2474-3ae84a88e08b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.751316] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] There are 64 instances to clean {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 1740.751558] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 6f31d6ad-480d-40dd-924e-f6277d93c99a] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1740.857634] env[63371]: DEBUG oslo_concurrency.lockutils [None req-476198ff-8458-40e9-a601-b8b5e6fea14b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "interface-d00602b9-16bf-4c11-bc47-6076dddbf159-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1740.857920] env[63371]: DEBUG oslo_concurrency.lockutils [None req-476198ff-8458-40e9-a601-b8b5e6fea14b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-d00602b9-16bf-4c11-bc47-6076dddbf159-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1740.858422] env[63371]: DEBUG nova.objects.instance [None req-476198ff-8458-40e9-a601-b8b5e6fea14b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'flavor' on Instance uuid d00602b9-16bf-4c11-bc47-6076dddbf159 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1741.009073] env[63371]: DEBUG oslo_vmware.api [None req-e456c420-8bc4-425a-928f-43fadcc8fdff tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774641, 'name': PowerOffVM_Task, 'duration_secs': 0.534477} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.009374] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e456c420-8bc4-425a-928f-43fadcc8fdff tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1741.009549] env[63371]: DEBUG nova.compute.manager [None req-e456c420-8bc4-425a-928f-43fadcc8fdff tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1741.010354] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15fd5bd8-d421-45c2-a8b3-51d8945f747d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.129720] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7a1a503-c481-4fb5-bdd1-a76026905e83 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.137149] env[63371]: DEBUG nova.compute.manager [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1741.140574] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c6e343f-cfd8-4fe7-bc72-c7446a9c825b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.178233] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33bd9d1d-2f4a-4ee3-a199-93608b0af614 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.190398] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57e3cdd-32df-42f1-8e53-35873d8b983c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.194972] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52914a69-149d-1fbc-2474-3ae84a88e08b, 'name': SearchDatastore_Task, 'duration_secs': 0.029587} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.195526] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1741.195851] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 485a2d6a-1b58-470d-9dc5-8cf31b6726ef/485a2d6a-1b58-470d-9dc5-8cf31b6726ef.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1741.196492] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b99e8cf-5cdf-40a9-9589-bdea09f54ba5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.207579] env[63371]: DEBUG nova.compute.provider_tree [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1741.210123] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1741.210123] env[63371]: value = "task-1774642" [ 1741.210123] env[63371]: _type = "Task" [ 1741.210123] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.219560] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774642, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.259416] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 382a5997-90bb-4bbc-b595-23c8d2f2e1f0] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1741.362649] env[63371]: DEBUG nova.objects.instance [None req-476198ff-8458-40e9-a601-b8b5e6fea14b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'pci_requests' on Instance uuid d00602b9-16bf-4c11-bc47-6076dddbf159 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1741.523944] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e456c420-8bc4-425a-928f-43fadcc8fdff tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "11527051-7a4f-481a-b5ed-14550c550c4e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.055s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.558775] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "158259a4-f54a-4192-b235-f03838193516" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.559155] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "158259a4-f54a-4192-b235-f03838193516" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.559515] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "158259a4-f54a-4192-b235-f03838193516-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.559817] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "158259a4-f54a-4192-b235-f03838193516-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.560120] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "158259a4-f54a-4192-b235-f03838193516-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.563615] 
env[63371]: INFO nova.compute.manager [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Terminating instance [ 1741.565570] env[63371]: DEBUG nova.compute.manager [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1741.565894] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1741.567195] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9628f195-d9a7-418b-9d39-5982bcc8d2aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.579264] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1741.579713] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-381032d4-4813-49f2-9548-2b998663b760 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.660030] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.711905] env[63371]: DEBUG nova.scheduler.client.report [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1741.727490] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774642, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488237} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.727894] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 485a2d6a-1b58-470d-9dc5-8cf31b6726ef/485a2d6a-1b58-470d-9dc5-8cf31b6726ef.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1741.728167] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1741.728479] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41a4259b-f3e5-4c63-8a5f-cfc7daa4fe40 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.734221] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1741.734221] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1741.734221] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleting the datastore file [datastore1] 158259a4-f54a-4192-b235-f03838193516 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1741.734221] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a41cefe3-9f14-4973-8708-01ec15110f91 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.737438] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1741.737438] env[63371]: value = "task-1774644" [ 1741.737438] env[63371]: _type = "Task" [ 1741.737438] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.744129] env[63371]: DEBUG oslo_vmware.api [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1741.744129] env[63371]: value = "task-1774645" [ 1741.744129] env[63371]: _type = "Task" [ 1741.744129] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.747483] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774644, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.757716] env[63371]: DEBUG oslo_vmware.api [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774645, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.763330] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 3da99cec-409f-4ea0-891c-2e9d7429674d] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1741.865751] env[63371]: DEBUG nova.objects.base [None req-476198ff-8458-40e9-a601-b8b5e6fea14b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1741.865751] env[63371]: DEBUG nova.network.neutron [None req-476198ff-8458-40e9-a601-b8b5e6fea14b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1742.029664] env[63371]: DEBUG oslo_concurrency.lockutils [None req-476198ff-8458-40e9-a601-b8b5e6fea14b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-d00602b9-16bf-4c11-bc47-6076dddbf159-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.172s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.220390] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.005s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.220932] env[63371]: DEBUG nova.compute.manager [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1742.223791] env[63371]: DEBUG oslo_concurrency.lockutils [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.600s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.223918] env[63371]: DEBUG oslo_concurrency.lockutils [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.225909] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 13.594s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.250172] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774644, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070836} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.251797] env[63371]: INFO nova.scheduler.client.report [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Deleted allocations for instance 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6 [ 1742.256104] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1742.261148] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a71e09f4-7791-4b65-92f4-f0ffe70c2678 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.278187] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 33952466-3df7-4485-8e7a-ab3d6ec3f22c] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1742.280590] env[63371]: DEBUG oslo_vmware.api [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774645, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150221} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.288862] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 485a2d6a-1b58-470d-9dc5-8cf31b6726ef/485a2d6a-1b58-470d-9dc5-8cf31b6726ef.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1742.289452] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1742.289644] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1742.289814] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1742.289976] env[63371]: INFO nova.compute.manager [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Took 0.72 seconds to destroy the instance on the hypervisor. [ 1742.290222] env[63371]: DEBUG oslo.service.loopingcall [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1742.290658] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a497862b-4082-4d46-9edb-88fbff646f9c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.306054] env[63371]: DEBUG nova.compute.manager [-] [instance: 158259a4-f54a-4192-b235-f03838193516] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1742.306180] env[63371]: DEBUG nova.network.neutron [-] [instance: 158259a4-f54a-4192-b235-f03838193516] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1742.315441] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1742.315441] env[63371]: value = "task-1774646" [ 1742.315441] env[63371]: _type = "Task" [ 1742.315441] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.325553] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774646, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.555497] env[63371]: DEBUG nova.objects.instance [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lazy-loading 'flavor' on Instance uuid 11527051-7a4f-481a-b5ed-14550c550c4e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1742.728746] env[63371]: DEBUG nova.compute.utils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1742.732551] env[63371]: INFO nova.compute.claims [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1742.735796] env[63371]: DEBUG nova.compute.manager [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Not allocating networking since 'none' was specified. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1978}} [ 1742.766073] env[63371]: DEBUG oslo_concurrency.lockutils [None req-902e6ac2-9e52-4b68-8f0c-a5857c8879b4 tempest-ServersTestJSON-1299687012 tempest-ServersTestJSON-1299687012-project-member] Lock "44a392e4-32c1-4aaf-8dc0-7df50c1a28c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.857s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.790811] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 61a44b0c-86fc-4f1c-a102-61eaff509d20] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1742.829322] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774646, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.852383] env[63371]: DEBUG nova.compute.manager [req-06474c0a-16ea-4dca-901c-49878ce6a491 req-1e7a0924-4ff3-4ef9-90c2-2733c1c44bac service nova] [instance: 158259a4-f54a-4192-b235-f03838193516] Received event network-vif-deleted-0169bee8-0cc2-4add-b53b-0dfecac574d2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1742.852613] env[63371]: INFO nova.compute.manager [req-06474c0a-16ea-4dca-901c-49878ce6a491 req-1e7a0924-4ff3-4ef9-90c2-2733c1c44bac service nova] [instance: 158259a4-f54a-4192-b235-f03838193516] Neutron deleted interface 0169bee8-0cc2-4add-b53b-0dfecac574d2; detaching it from the instance and deleting it from the info cache [ 1742.852799] env[63371]: DEBUG nova.network.neutron [req-06474c0a-16ea-4dca-901c-49878ce6a491 req-1e7a0924-4ff3-4ef9-90c2-2733c1c44bac service nova] [instance: 158259a4-f54a-4192-b235-f03838193516] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.062502] env[63371]: DEBUG oslo_concurrency.lockutils [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "refresh_cache-11527051-7a4f-481a-b5ed-14550c550c4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1743.063572] env[63371]: DEBUG oslo_concurrency.lockutils [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquired lock "refresh_cache-11527051-7a4f-481a-b5ed-14550c550c4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1743.063867] env[63371]: DEBUG nova.network.neutron [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1743.064172] env[63371]: DEBUG nova.objects.instance [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lazy-loading 'info_cache' on Instance uuid 11527051-7a4f-481a-b5ed-14550c550c4e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1743.184149] env[63371]: DEBUG nova.network.neutron [-] [instance: 158259a4-f54a-4192-b235-f03838193516] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1743.238690] env[63371]: INFO nova.compute.resource_tracker [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating resource usage from migration 1bad7cd8-a319-4561-8560-524cc376e5e2 [ 1743.241568] env[63371]: DEBUG nova.compute.manager [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1743.295150] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ee3ea0ef-cde9-4326-b564-1aa216e00751] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1743.329027] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774646, 'name': ReconfigVM_Task, 'duration_secs': 0.715482} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.329310] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 485a2d6a-1b58-470d-9dc5-8cf31b6726ef/485a2d6a-1b58-470d-9dc5-8cf31b6726ef.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1743.330706] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f077e5a6-30c6-4942-affc-44d3fb4f918b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.338025] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1743.338025] env[63371]: value = "task-1774647" [ 1743.338025] env[63371]: _type = "Task" [ 1743.338025] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.349745] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774647, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.357368] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-335cf05e-4731-4ae7-a22a-e1331e15e50a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.367543] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f319d166-bdfa-4e29-bade-9bc4b435025e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.409267] env[63371]: DEBUG nova.compute.manager [req-06474c0a-16ea-4dca-901c-49878ce6a491 req-1e7a0924-4ff3-4ef9-90c2-2733c1c44bac service nova] [instance: 158259a4-f54a-4192-b235-f03838193516] Detach interface failed, port_id=0169bee8-0cc2-4add-b53b-0dfecac574d2, reason: Instance 158259a4-f54a-4192-b235-f03838193516 could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1743.573154] env[63371]: DEBUG nova.objects.base [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Object Instance<11527051-7a4f-481a-b5ed-14550c550c4e> lazy-loaded attributes: flavor,info_cache {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1743.618552] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43cd574a-b143-4fc8-bd10-a56469eb66f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.632210] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2442b3e9-92dc-421a-aa58-ce2e58c84dd6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.663243] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52cfaea-7cbe-493c-a6ff-e4580d316890 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.671828] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1526d833-21de-4468-9065-116595c3d221 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.685472] env[63371]: DEBUG nova.compute.provider_tree [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1743.689963] env[63371]: INFO nova.compute.manager [-] [instance: 158259a4-f54a-4192-b235-f03838193516] Took 1.38 seconds to deallocate network for instance. [ 1743.803216] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 36d5c00a-4762-4801-aff1-0a22e336730a] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1743.848360] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774647, 'name': Rename_Task, 'duration_secs': 0.217085} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1743.848688] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1743.848978] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed6d7666-57a0-4d10-beca-142bac9c1491 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.856800] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1743.856800] env[63371]: value = "task-1774648" [ 1743.856800] env[63371]: _type = "Task" [ 1743.856800] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.866850] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774648, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.972618] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "interface-d00602b9-16bf-4c11-bc47-6076dddbf159-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.972977] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-d00602b9-16bf-4c11-bc47-6076dddbf159-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.973259] env[63371]: DEBUG nova.objects.instance [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'flavor' on Instance uuid d00602b9-16bf-4c11-bc47-6076dddbf159 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1744.137163] env[63371]: INFO nova.compute.manager [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Rebuilding instance [ 1744.189355] env[63371]: DEBUG nova.scheduler.client.report [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1744.197926] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.206166] env[63371]: DEBUG nova.compute.manager [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1744.207404] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce60636-d92f-4041-9e28-9415e0d14476 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.250945] env[63371]: DEBUG nova.compute.manager [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1744.275025] env[63371]: DEBUG nova.virt.hardware [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1744.275292] env[63371]: DEBUG nova.virt.hardware [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1744.275448] env[63371]: DEBUG nova.virt.hardware [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1744.275630] env[63371]: DEBUG nova.virt.hardware [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 
tempest-ServersAdmin275Test-858740018-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1744.275773] env[63371]: DEBUG nova.virt.hardware [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1744.275919] env[63371]: DEBUG nova.virt.hardware [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1744.276137] env[63371]: DEBUG nova.virt.hardware [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1744.276292] env[63371]: DEBUG nova.virt.hardware [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1744.276529] env[63371]: DEBUG nova.virt.hardware [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1744.276944] env[63371]: DEBUG nova.virt.hardware [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1744.278261] env[63371]: DEBUG nova.virt.hardware [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1744.278811] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e68005df-073a-469a-a628-01ec26c820e7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.287596] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25da8367-6eb8-4167-8e49-040a2f7e96e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.301403] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1744.307220] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Creating folder: Project (bbb876cc30e84abda7d17d8969d20ebc). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1744.310089] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 3065fc71-f127-43b7-83b7-70140f29965b] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1744.312642] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d5677f0-689f-418a-8545-a9dffd445a88 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.322781] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Created folder: Project (bbb876cc30e84abda7d17d8969d20ebc) in parent group-v368199. [ 1744.323693] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Creating folder: Instances. Parent ref: group-v368456. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1744.324207] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6652cd55-72fe-4034-ab78-23eca085db5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.333141] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Created folder: Instances in parent group-v368456. [ 1744.333387] env[63371]: DEBUG oslo.service.loopingcall [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1744.334158] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1744.334371] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0bd7ea51-779e-49a0-8b1b-bb2f1a5ab6fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.352759] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1744.352759] env[63371]: value = "task-1774651" [ 1744.352759] env[63371]: _type = "Task" [ 1744.352759] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.364194] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774651, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.367811] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774648, 'name': PowerOnVM_Task} progress is 71%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.441235] env[63371]: DEBUG nova.network.neutron [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Updating instance_info_cache with network_info: [{"id": "5d6f97e2-eb9c-468d-8931-77a4c10ff125", "address": "fa:16:3e:12:fb:cf", "network": {"id": "57b9c8c9-145c-4988-8307-0c44f962835e", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-309269171-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cdcb553167e84358b2f89a0eb9fe09ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d733acc2-07d0-479e-918c-ec8a21925389", "external-id": "nsx-vlan-transportzone-459", "segmentation_id": 459, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d6f97e2-eb", "ovs_interfaceid": "5d6f97e2-eb9c-468d-8931-77a4c10ff125", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1744.619268] env[63371]: DEBUG nova.objects.instance [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'pci_requests' on Instance uuid d00602b9-16bf-4c11-bc47-6076dddbf159 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1744.700165] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.474s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.700473] env[63371]: INFO nova.compute.manager [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Migrating [ 1744.709886] env[63371]: DEBUG oslo_concurrency.lockutils [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.327s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.710220] env[63371]: DEBUG 
oslo_concurrency.lockutils [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.714916] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.938s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.716314] env[63371]: DEBUG nova.objects.instance [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lazy-loading 'pci_requests' on Instance uuid 9985dbcd-4498-4629-aae5-5e1933307c50 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1744.727300] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1744.735478] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b6644cf1-e83e-4128-ba8e-1db05beb15be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.742718] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Waiting for the task: (returnval){ [ 1744.742718] env[63371]: value = "task-1774652" [ 1744.742718] env[63371]: _type = "Task" [ 1744.742718] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.747660] env[63371]: INFO nova.scheduler.client.report [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Deleted allocations for instance 935cf583-ecde-4a10-a773-6ff765e5bb49 [ 1744.759030] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774652, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.815530] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 3c6294ae-9a16-4f1e-abd4-1aec224625ac] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1744.866133] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774651, 'name': CreateVM_Task, 'duration_secs': 0.39469} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.866736] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1744.868949] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.868949] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.868949] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1744.871455] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-253de1e0-1cf7-46c9-9c02-7d32b17585b6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.873493] env[63371]: DEBUG oslo_vmware.api [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774648, 'name': PowerOnVM_Task, 'duration_secs': 0.741039} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.873801] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1744.874058] env[63371]: INFO nova.compute.manager [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Took 10.13 seconds to spawn the instance on the hypervisor. 
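The PowerOnVM_Task, SearchDatastore_Task and CreateVM_Task entries above all follow oslo.vmware's asynchronous task pattern: the driver invokes a vSphere *_Task method, gets back a task reference, and wait_for_task() polls it (the "progress is N%" lines) until it reports success or raises. A minimal sketch of that flow, assuming a hypothetical vCenter endpoint, credentials and managed-object id; invoke_api(), wait_for_task() and get_moref() are the real oslo.vmware entry points, everything else is illustrative:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Hypothetical vCenter host and credentials; retry/poll values are examples.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'nova-user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # PowerOnVM_Task returns immediately with a Task moref; wait_for_task()
    # then polls TaskInfo until the state is "success" or an error is raised.
    vm_ref = vim_util.get_moref('vm-1234', 'VirtualMachine')  # example moref
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)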
[ 1744.874249] env[63371]: DEBUG nova.compute.manager [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1744.875432] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcca79f-8e51-4dc7-aae9-3f2614cb302d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.879476] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1744.879476] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b377c3-4699-85cb-e326-cd155a57649f" [ 1744.879476] env[63371]: _type = "Task" [ 1744.879476] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.894123] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b377c3-4699-85cb-e326-cd155a57649f, 'name': SearchDatastore_Task, 'duration_secs': 0.013008} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.894413] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1744.894702] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1744.894934] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.895145] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.895339] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1744.895578] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cddb4adb-9552-4fc1-94b8-0bfb061e6391 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.904860] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1744.905202] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1744.905989] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fe7219e-982e-4888-b5d7-494681169300 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.912513] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1744.912513] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5223b2ef-0c44-474e-22e0-c5adbf76210c" [ 1744.912513] env[63371]: _type = "Task" [ 1744.912513] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.921583] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5223b2ef-0c44-474e-22e0-c5adbf76210c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.945598] env[63371]: DEBUG oslo_concurrency.lockutils [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Releasing lock "refresh_cache-11527051-7a4f-481a-b5ed-14550c550c4e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.122179] env[63371]: DEBUG nova.objects.base [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1745.122473] env[63371]: DEBUG nova.network.neutron [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1745.165772] env[63371]: DEBUG nova.policy [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aacd81490704110b6cc6aba338883a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a5897667b6b47deb7ff5b64f9499f36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1745.220671] env[63371]: DEBUG nova.objects.instance [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lazy-loading 'numa_topology' on Instance uuid 9985dbcd-4498-4629-aae5-5e1933307c50 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1745.233876] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1745.234425] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1745.234425] env[63371]: DEBUG nova.network.neutron [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1745.252845] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 
tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774652, 'name': PowerOffVM_Task, 'duration_secs': 0.219468} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.253125] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1745.253779] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1745.254460] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c9b05d9-bf37-4319-bfbd-018074c3b251 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.259026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-27c42ee7-39e7-42a2-94f7-4b2442a41265 tempest-ImagesTestJSON-1893767495 tempest-ImagesTestJSON-1893767495-project-member] Lock "935cf583-ecde-4a10-a773-6ff765e5bb49" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.871s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.264016] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Waiting for the task: (returnval){ [ 1745.264016] env[63371]: value = "task-1774653" [ 1745.264016] env[63371]: _type = "Task" [ 1745.264016] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.272485] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774653, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.319718] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: e1bc4623-f6b5-4440-a58d-594e9cbe3628] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1745.398214] env[63371]: INFO nova.compute.manager [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Took 32.70 seconds to build instance. 
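The "compute_resources" lock messages in this stretch (acquired by ... waited Ns, "released" ... held Ns) are emitted by oslo.concurrency's lockutils wrapper, which the resource tracker uses to serialise claims, resizes and usage updates. A minimal sketch of the same pattern, assuming an in-process lock and a hypothetical function name:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs with the in-process "compute_resources" semaphore held; lockutils
        # logs how long the caller waited to acquire it and how long it was held.
        pass

    # The same lock can also be taken explicitly as a context manager.
    with lockutils.lock('compute_resources'):
        pass  # work done while holding the lock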
[ 1745.422150] env[63371]: DEBUG nova.network.neutron [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Successfully created port: 57835801-cbba-4176-8f6b-8d0ec76aa66e {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1745.430334] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5223b2ef-0c44-474e-22e0-c5adbf76210c, 'name': SearchDatastore_Task, 'duration_secs': 0.014067} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.432338] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0a47bf9-5995-4be5-8975-6bd3f2666ede {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.436086] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1745.436086] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ebc202-7069-30b8-f140-c1643f3251bd" [ 1745.436086] env[63371]: _type = "Task" [ 1745.436086] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.448520] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ebc202-7069-30b8-f140-c1643f3251bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.451163] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1745.451427] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-288398ca-d4b0-439b-a056-dba13e840152 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.458072] env[63371]: DEBUG oslo_vmware.api [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1745.458072] env[63371]: value = "task-1774654" [ 1745.458072] env[63371]: _type = "Task" [ 1745.458072] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.466301] env[63371]: DEBUG oslo_vmware.api [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774654, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.724377] env[63371]: INFO nova.compute.claims [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1745.778205] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1745.778446] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Volume detach. Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1745.778517] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368346', 'volume_id': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'name': 'volume-32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c03e2dc4-75d9-4fbb-afc8-046cbbf908ac', 'attached_at': '', 'detached_at': '', 'volume_id': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'serial': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1745.779401] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37309b29-980f-4ad9-a988-40eb3175a66f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.801175] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250efc20-c9f5-407c-91a5-d2f32a705513 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.808978] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a43afc4-596a-4471-b050-98dc30e236a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.828377] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 0cd2018f-7a54-4458-b5fd-353ab75ffbfd] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1745.830823] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab939f5c-2639-4da1-a38a-97df8eeedef5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.849689] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-090de675-2f44-4a28-8925-ab38aa3f6053 
tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] The volume has not been displaced from its original location: [datastore1] volume-32d41ea7-8d37-4108-a5fd-9dd5e6d351de/volume-32d41ea7-8d37-4108-a5fd-9dd5e6d351de.vmdk. No consolidation needed. {{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1745.855236] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Reconfiguring VM instance instance-00000040 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1745.858263] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-864e173b-3a8a-4081-9516-e8d9c6d49fd7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.878483] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Waiting for the task: (returnval){ [ 1745.878483] env[63371]: value = "task-1774655" [ 1745.878483] env[63371]: _type = "Task" [ 1745.878483] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.889132] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774655, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.900447] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f13bf3d9-b33b-4495-912c-9d9a8a463f6a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "485a2d6a-1b58-470d-9dc5-8cf31b6726ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.210s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.950427] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ebc202-7069-30b8-f140-c1643f3251bd, 'name': SearchDatastore_Task, 'duration_secs': 0.030128} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.950781] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1745.951095] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad/e16e4a55-4198-4308-b12c-d9ac07daecad.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1745.951405] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9d2b588c-3969-4366-a833-9b7f189c3ca5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.957832] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1745.957832] env[63371]: value = "task-1774656" [ 1745.957832] env[63371]: _type = "Task" [ 1745.957832] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.974470] env[63371]: DEBUG oslo_vmware.api [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774654, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.975554] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774656, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.017558] env[63371]: DEBUG nova.network.neutron [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating instance_info_cache with network_info: [{"id": "e144cd6b-c3f5-496e-99c6-19e9ab58c042", "address": "fa:16:3e:99:d0:57", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape144cd6b-c3", "ovs_interfaceid": "e144cd6b-c3f5-496e-99c6-19e9ab58c042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.334836] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 3027832f-12cd-4255-b699-bcbb254a6c5a] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1746.388205] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774655, 'name': ReconfigVM_Task, 'duration_secs': 0.253435} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.388474] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Reconfigured VM instance instance-00000040 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1746.393337] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e15f3ae0-7c37-4f66-98d1-e9db0bd83d9f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.409691] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Waiting for the task: (returnval){ [ 1746.409691] env[63371]: value = "task-1774657" [ 1746.409691] env[63371]: _type = "Task" [ 1746.409691] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.419556] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774657, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.467642] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774656, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.477039] env[63371]: DEBUG oslo_vmware.api [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774654, 'name': PowerOnVM_Task, 'duration_secs': 0.758299} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.477346] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1746.477533] env[63371]: DEBUG nova.compute.manager [None req-995e0e25-0374-4744-a59f-7a192fe03969 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1746.478566] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0dc90d5-d8bc-475c-90fd-23ca2fcc5081 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.520990] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1746.838664] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: e781866e-9b26-47c7-b1a6-d6d9547bf2fd] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1746.920781] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774657, 'name': ReconfigVM_Task, 'duration_secs': 0.165759} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.923886] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368346', 'volume_id': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'name': 'volume-32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c03e2dc4-75d9-4fbb-afc8-046cbbf908ac', 'attached_at': '', 'detached_at': '', 'volume_id': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de', 'serial': '32d41ea7-8d37-4108-a5fd-9dd5e6d351de'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1746.923981] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1746.924946] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e30b7e33-f9fa-4334-9e19-055e20f999e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.932115] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1746.932349] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22ad9fca-2f9b-4a66-9552-9a37ca8386e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.970509] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774656, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.574032} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1746.971151] env[63371]: DEBUG nova.network.neutron [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Successfully updated port: 57835801-cbba-4176-8f6b-8d0ec76aa66e {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1746.972270] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad/e16e4a55-4198-4308-b12c-d9ac07daecad.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1746.972497] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1746.972966] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb5a8cde-ed45-471e-b5c8-1c545bd8d872 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.983164] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1746.983164] env[63371]: value = "task-1774659" [ 1746.983164] env[63371]: _type = "Task" [ 1746.983164] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.998304] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774659, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.033068] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1747.033302] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1747.033501] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Deleting the datastore file [datastore1] c03e2dc4-75d9-4fbb-afc8-046cbbf908ac {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1747.033772] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-497bf367-daba-4bc1-9f04-8b23b4cffe11 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.040455] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Waiting for the task: (returnval){ [ 1747.040455] env[63371]: value = "task-1774660" [ 1747.040455] env[63371]: _type = "Task" [ 1747.040455] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.042011] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315dcc6d-edd8-4362-ba8a-71d247817810 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.055390] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90e50e70-bf3c-44c8-b440-dcb04b069067 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.058437] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774660, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.086935] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41d604a-b5d4-4f74-ad05-131916927388 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.095460] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae79db0f-c53f-4a80-a9f9-d8a5635012dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.110028] env[63371]: DEBUG nova.compute.provider_tree [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1747.139424] env[63371]: DEBUG nova.compute.manager [req-7763cc78-9a73-4d98-830f-0d6ae8dfde3e req-1c5cabe2-c283-4145-8f1c-bd9757f4eeb9 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Received event network-vif-plugged-57835801-cbba-4176-8f6b-8d0ec76aa66e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1747.139658] env[63371]: DEBUG oslo_concurrency.lockutils [req-7763cc78-9a73-4d98-830f-0d6ae8dfde3e req-1c5cabe2-c283-4145-8f1c-bd9757f4eeb9 service nova] Acquiring lock "d00602b9-16bf-4c11-bc47-6076dddbf159-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.139867] env[63371]: DEBUG oslo_concurrency.lockutils [req-7763cc78-9a73-4d98-830f-0d6ae8dfde3e req-1c5cabe2-c283-4145-8f1c-bd9757f4eeb9 service nova] Lock "d00602b9-16bf-4c11-bc47-6076dddbf159-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.139997] env[63371]: DEBUG oslo_concurrency.lockutils [req-7763cc78-9a73-4d98-830f-0d6ae8dfde3e req-1c5cabe2-c283-4145-8f1c-bd9757f4eeb9 service nova] Lock "d00602b9-16bf-4c11-bc47-6076dddbf159-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.140179] env[63371]: DEBUG nova.compute.manager [req-7763cc78-9a73-4d98-830f-0d6ae8dfde3e req-1c5cabe2-c283-4145-8f1c-bd9757f4eeb9 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] No waiting events found dispatching network-vif-plugged-57835801-cbba-4176-8f6b-8d0ec76aa66e {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1747.140338] env[63371]: WARNING nova.compute.manager [req-7763cc78-9a73-4d98-830f-0d6ae8dfde3e req-1c5cabe2-c283-4145-8f1c-bd9757f4eeb9 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Received unexpected event network-vif-plugged-57835801-cbba-4176-8f6b-8d0ec76aa66e for instance with vm_state active and task_state None. 
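The network-vif-plugged entries above come through Nova's external-event path: Neutron reports port state changes via the os-server-external-events API, and the compute manager matches them against any waiter registered for that instance ("No waiting events found" means nothing was blocked on this plug, hence the warning). A rough sketch of the request that produces such an event, normally sent by Neutron's Nova notifier rather than by hand; the endpoint, token and header handling here are placeholders, while the server and port UUIDs are the ones from the log:

    import requests

    # Placeholder values; in a real deployment the token comes from Keystone
    # and the URL from the compute endpoint in the service catalog.
    NOVA = 'http://controller:8774/v2.1'
    TOKEN = 'gAAAA...'  # hypothetical auth token

    payload = {'events': [{
        'name': 'network-vif-plugged',
        'server_uuid': 'd00602b9-16bf-4c11-bc47-6076dddbf159',
        'tag': '57835801-cbba-4176-8f6b-8d0ec76aa66e',  # port id from the log
        'status': 'completed',
    }]}

    resp = requests.post(f'{NOVA}/os-server-external-events',
                         json=payload,
                         headers={'X-Auth-Token': TOKEN})
    resp.raise_for_status()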
[ 1747.180577] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "485a2d6a-1b58-470d-9dc5-8cf31b6726ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.180857] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "485a2d6a-1b58-470d-9dc5-8cf31b6726ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.181177] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "485a2d6a-1b58-470d-9dc5-8cf31b6726ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.181381] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "485a2d6a-1b58-470d-9dc5-8cf31b6726ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.181563] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "485a2d6a-1b58-470d-9dc5-8cf31b6726ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.183633] env[63371]: INFO nova.compute.manager [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Terminating instance [ 1747.185496] env[63371]: DEBUG nova.compute.manager [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1747.185729] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1747.186597] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6427e6-b4ac-4aa1-b3f2-1c3ecc63a6c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.194394] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1747.194586] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d073e297-30af-4ca4-9e61-3b400b824a08 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.200827] env[63371]: DEBUG oslo_vmware.api [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1747.200827] env[63371]: value = "task-1774661" [ 1747.200827] env[63371]: _type = "Task" [ 1747.200827] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.210910] env[63371]: DEBUG oslo_vmware.api [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774661, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.342868] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: fc88ca3f-6c6e-44f0-8d62-0b06f4cfba9e] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1747.474522] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1747.474722] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1747.474904] env[63371]: DEBUG nova.network.neutron [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1747.492842] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774659, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.194277} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.493121] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1747.494041] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea4115ef-455d-440f-be05-5898b91632da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.513652] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad/e16e4a55-4198-4308-b12c-d9ac07daecad.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1747.514185] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac98180b-2d46-41ce-8dc6-b9a7bf44ca5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.533639] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1747.533639] env[63371]: value = "task-1774662" [ 1747.533639] env[63371]: _type = "Task" [ 1747.533639] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.542190] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774662, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.551804] env[63371]: DEBUG oslo_vmware.api [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Task: {'id': task-1774660, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281143} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.552122] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1747.552243] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1747.552408] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1747.608263] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Volume detach. Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1747.608626] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9338bb05-11ca-42c0-a914-269e0a2fbc31 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.613450] env[63371]: DEBUG nova.scheduler.client.report [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1747.621040] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9374c70e-1001-4468-9155-1db28890f4a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.656424] env[63371]: ERROR nova.compute.manager [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Failed to detach volume 32d41ea7-8d37-4108-a5fd-9dd5e6d351de from /dev/sda: nova.exception.InstanceNotFound: Instance c03e2dc4-75d9-4fbb-afc8-046cbbf908ac could not be found. 
[ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Traceback (most recent call last): [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] self.driver.rebuild(**kwargs) [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] raise NotImplementedError() [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] NotImplementedError [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] During handling of the above exception, another exception occurred: [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Traceback (most recent call last): [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] self.driver.detach_volume(context, old_connection_info, [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] return self._volumeops.detach_volume(connection_info, instance) [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] self._detach_volume_vmdk(connection_info, instance) [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] stable_ref.fetch_moref(session) [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] 
nova.exception.InstanceNotFound: Instance c03e2dc4-75d9-4fbb-afc8-046cbbf908ac could not be found. [ 1747.656424] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] [ 1747.716421] env[63371]: DEBUG oslo_vmware.api [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774661, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.802712] env[63371]: DEBUG nova.compute.utils [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Build of instance c03e2dc4-75d9-4fbb-afc8-046cbbf908ac aborted: Failed to rebuild volume backed instance. {{(pid=63371) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1747.805161] env[63371]: ERROR nova.compute.manager [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance c03e2dc4-75d9-4fbb-afc8-046cbbf908ac aborted: Failed to rebuild volume backed instance. [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Traceback (most recent call last): [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/compute/manager.py", line 4143, in _do_rebuild_instance [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] self.driver.rebuild(**kwargs) [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] raise NotImplementedError() [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] NotImplementedError [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] During handling of the above exception, another exception occurred: [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Traceback (most recent call last): [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/compute/manager.py", line 3601, in _rebuild_volume_backed_instance [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] self._detach_root_volume(context, instance, root_bdm) [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/compute/manager.py", line 3580, in _detach_root_volume [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] with excutils.save_and_reraise_exception(): [ 1747.805161] env[63371]: ERROR 
nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] self.force_reraise() [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] raise self.value [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/compute/manager.py", line 3566, in _detach_root_volume [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] self.driver.detach_volume(context, old_connection_info, [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 559, in detach_volume [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] return self._volumeops.detach_volume(connection_info, instance) [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] self._detach_volume_vmdk(connection_info, instance) [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] stable_ref.fetch_moref(session) [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] nova.exception.InstanceNotFound: Instance c03e2dc4-75d9-4fbb-afc8-046cbbf908ac could not be found. 
[ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] During handling of the above exception, another exception occurred: [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Traceback (most recent call last): [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/compute/manager.py", line 10866, in _error_out_instance_on_exception [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] yield [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/compute/manager.py", line 3869, in rebuild_instance [ 1747.805161] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] self._do_rebuild_instance_with_claim( [ 1747.806243] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/compute/manager.py", line 3955, in _do_rebuild_instance_with_claim [ 1747.806243] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] self._do_rebuild_instance( [ 1747.806243] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/compute/manager.py", line 4147, in _do_rebuild_instance [ 1747.806243] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] self._rebuild_default_impl(**kwargs) [ 1747.806243] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/compute/manager.py", line 3724, in _rebuild_default_impl [ 1747.806243] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] self._rebuild_volume_backed_instance( [ 1747.806243] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] File "/opt/stack/nova/nova/compute/manager.py", line 3616, in _rebuild_volume_backed_instance [ 1747.806243] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] raise exception.BuildAbortException( [ 1747.806243] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] nova.exception.BuildAbortException: Build of instance c03e2dc4-75d9-4fbb-afc8-046cbbf908ac aborted: Failed to rebuild volume backed instance. [ 1747.806243] env[63371]: ERROR nova.compute.manager [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] [ 1747.846302] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 3bd1c148-a48d-402c-bd76-2cb1d38b49f7] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1748.011085] env[63371]: WARNING nova.network.neutron [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] 78c77028-c23a-4160-8b08-d336e8101b3b already exists in list: networks containing: ['78c77028-c23a-4160-8b08-d336e8101b3b']. 
ignoring it [ 1748.035749] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1c5bd74-f30c-4e49-8235-0fa2f0ca6266 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.061328] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774662, 'name': ReconfigVM_Task, 'duration_secs': 0.295459} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.061328] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating instance '3a6c12a7-732f-4a73-a8c5-6810b554cc03' progress to 0 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1748.064704] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Reconfigured VM instance instance-0000005f to attach disk [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad/e16e4a55-4198-4308-b12c-d9ac07daecad.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1748.065324] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0fe95509-e029-4617-96c4-6af0b106556c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.072564] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1748.072564] env[63371]: value = "task-1774663" [ 1748.072564] env[63371]: _type = "Task" [ 1748.072564] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.084481] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774663, 'name': Rename_Task} progress is 6%. 
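The rebuild failure traced above follows directly from the record ordering: the vmwareapi driver does not implement rebuild(), so the compute manager falls back to _rebuild_default_impl and first tries to detach the root volume; the VM is already gone from vCenter, so get_vm_ref/fetch_moref raises InstanceNotFound, and _rebuild_volume_backed_instance converts that into BuildAbortException and sets the instance vm_state to ERROR. The sketch below reproduces only the exception-chaining shape of that path with stand-in classes; it is illustrative and is not the Nova code.

# Illustrative stand-ins only -- not the real nova.exception classes.
class InstanceNotFound(Exception):
    pass

class BuildAbortException(Exception):
    pass


def detach_root_volume(instance_uuid, known_instances):
    # Mirrors the failing step in the traceback above: no vCenter moref can
    # be found for the instance, so the detach raises InstanceNotFound
    # (vm_util.fetch_moref in the real code).
    if instance_uuid not in known_instances:
        raise InstanceNotFound(f"Instance {instance_uuid} could not be found.")


def rebuild_volume_backed_instance(instance_uuid, known_instances):
    try:
        detach_root_volume(instance_uuid, known_instances)
    except InstanceNotFound:
        # Raising a new exception inside the except block produces the
        # "During handling of the above exception, another exception
        # occurred" chain seen in the log.
        raise BuildAbortException(
            f"Build of instance {instance_uuid} aborted: "
            "Failed to rebuild volume backed instance.")


if __name__ == "__main__":
    try:
        rebuild_volume_backed_instance(
            "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac", known_instances=set())
    except BuildAbortException as exc:
        print(f"vm_state would be set to ERROR: {exc}")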
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.122023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.407s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1748.124031] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 16.445s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.170178] env[63371]: INFO nova.network.neutron [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Updating port d3f41a80-52de-46a5-ac15-9a26e6710908 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1748.212974] env[63371]: DEBUG oslo_vmware.api [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774661, 'name': PowerOffVM_Task, 'duration_secs': 0.553687} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.213265] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1748.213431] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1748.213706] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3edee1d7-40cc-4582-9146-65c7f301d516 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.319222] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1748.323705] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1748.323705] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 
tempest-ServersTestJSON-1162814863-project-member] Deleting the datastore file [datastore1] 485a2d6a-1b58-470d-9dc5-8cf31b6726ef {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1748.323705] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf95d3cf-e556-48e5-a121-d997af444157 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.331044] env[63371]: DEBUG oslo_vmware.api [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1748.331044] env[63371]: value = "task-1774665" [ 1748.331044] env[63371]: _type = "Task" [ 1748.331044] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.343043] env[63371]: DEBUG oslo_vmware.api [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774665, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.348811] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 12e393d7-e8d5-4a9a-bad7-3cfffbb9d956] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1748.504393] env[63371]: DEBUG nova.network.neutron [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updating instance_info_cache with network_info: [{"id": "bc8b891d-040a-4a55-a281-311c08ae828d", "address": "fa:16:3e:ea:27:0c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8b891d-04", "ovs_interfaceid": "bc8b891d-040a-4a55-a281-311c08ae828d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "57835801-cbba-4176-8f6b-8d0ec76aa66e", "address": "fa:16:3e:29:75:d5", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57835801-cb", "ovs_interfaceid": "57835801-cbba-4176-8f6b-8d0ec76aa66e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1748.569538] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1748.569861] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca583b02-f408-4ee3-9aff-656e66f4bbee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.578078] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1748.578078] env[63371]: value = "task-1774666" [ 1748.578078] env[63371]: _type = "Task" [ 1748.578078] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.584629] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774663, 'name': Rename_Task, 'duration_secs': 0.131856} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.585285] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1748.585529] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4537aa72-ee04-4ccf-89f1-bbd745016c9c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.590298] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774666, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.595814] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1748.595814] env[63371]: value = "task-1774667" [ 1748.595814] env[63371]: _type = "Task" [ 1748.595814] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1748.616044] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774667, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.630020] env[63371]: INFO nova.compute.claims [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1748.840723] env[63371]: DEBUG oslo_vmware.api [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774665, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389526} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.841754] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1748.841964] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1748.842176] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1748.842390] env[63371]: INFO nova.compute.manager [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1748.842644] env[63371]: DEBUG oslo.service.loopingcall [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
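For reference, the instance_info_cache entry logged a little earlier for d00602b9-16bf-4c11-bc47-6076dddbf159 is a list of per-port VIF records. The excerpt below keeps only the identifiers and addresses that actually appear in that record and walks them with plain dicts; the real object is Nova's NetworkInfo/VIF model rather than raw dicts, so this is illustrative only.

# Trimmed excerpt of the logged instance_info_cache entry; most keys omitted.
network_info = [
    {"id": "bc8b891d-040a-4a55-a281-311c08ae828d",
     "address": "fa:16:3e:ea:27:0c",
     "network": {"subnets": [{"ips": [
         {"address": "192.168.128.10", "type": "fixed",
          "floating_ips": [{"address": "10.180.180.231", "type": "floating"}]}]}]}},
    {"id": "57835801-cbba-4176-8f6b-8d0ec76aa66e",
     "address": "fa:16:3e:29:75:d5",
     "network": {"subnets": [{"ips": [
         {"address": "192.168.128.13", "type": "fixed", "floating_ips": []}]}]}},
]

# Print each port's MAC, fixed IP and any floating IPs attached to it.
for vif in network_info:
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            floating = [f["address"] for f in ip.get("floating_ips", [])]
            print(vif["id"][:11], vif["address"], ip["address"], floating or "-")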
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1748.843096] env[63371]: DEBUG nova.compute.manager [-] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1748.843196] env[63371]: DEBUG nova.network.neutron [-] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1748.851221] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 1276e001-fb07-4367-8b03-81c5fe5fbd0d] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1749.008345] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1749.008609] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.008761] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.009732] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3bd4a0b-b670-4a94-89c9-22c935c9d8b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.027246] env[63371]: DEBUG nova.virt.hardware [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1749.027486] env[63371]: DEBUG nova.virt.hardware [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1749.027644] env[63371]: DEBUG nova.virt.hardware [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 
tempest-AttachInterfacesTestJSON-796074902-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1749.027822] env[63371]: DEBUG nova.virt.hardware [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1749.027975] env[63371]: DEBUG nova.virt.hardware [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1749.028462] env[63371]: DEBUG nova.virt.hardware [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1749.028462] env[63371]: DEBUG nova.virt.hardware [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1749.028462] env[63371]: DEBUG nova.virt.hardware [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1749.028625] env[63371]: DEBUG nova.virt.hardware [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1749.028766] env[63371]: DEBUG nova.virt.hardware [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1749.028933] env[63371]: DEBUG nova.virt.hardware [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1749.035488] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Reconfiguring VM to attach interface {{(pid=63371) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1749.036133] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb765a19-dc71-4593-9fe6-50ec528c8430 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.053757] env[63371]: DEBUG oslo_vmware.api [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1749.053757] env[63371]: value = "task-1774668" [ 1749.053757] env[63371]: _type = "Task" [ 1749.053757] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.061629] env[63371]: DEBUG oslo_vmware.api [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774668, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.088087] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774666, 'name': PowerOffVM_Task, 'duration_secs': 0.187009} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.088445] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1749.088718] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating instance '3a6c12a7-732f-4a73-a8c5-6810b554cc03' progress to 17 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1749.107916] env[63371]: DEBUG oslo_vmware.api [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774667, 'name': PowerOnVM_Task, 'duration_secs': 0.44524} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1749.107916] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1749.107916] env[63371]: INFO nova.compute.manager [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Took 4.85 seconds to spawn the instance on the hypervisor. 
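The task records in this stretch (ReconfigVM_Task, Rename_Task, PowerOffVM_Task, PowerOnVM_Task) all pass through the same wait loop: wait_for_task registers interest in the task (api.py:397), _poll_task logs intermediate progress (api.py:434) and the completion with duration_secs (api.py:444). The loop below is a much simplified stand-in, driven by a fake task instead of a vCenter TaskInfo object, and is not the oslo.vmware implementation.

import time

def fake_task_states():
    # Pretend the task reports 0% -> 50% -> success on successive polls.
    yield {"state": "running", "progress": 0}
    yield {"state": "running", "progress": 50}
    yield {"state": "success", "progress": 100}


def wait_for_task(task_id, states, interval=0.1):
    # Poll until the task reaches a terminal state, logging progress as we go.
    start = time.monotonic()
    for info in states:
        if info["state"] == "success":
            print(f"Task {task_id} completed successfully; "
                  f"duration_secs={time.monotonic() - start:.3f}")
            return
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed")
        print(f"Task {task_id} progress is {info['progress']}%.")
        time.sleep(interval)
    raise RuntimeError(f"Task {task_id} never reached a terminal state")


wait_for_task("task-1774667", fake_task_states())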
[ 1749.107916] env[63371]: DEBUG nova.compute.manager [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1749.107916] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966290f2-b190-4b29-92d5-0b73f72e4277 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.136151] env[63371]: INFO nova.compute.resource_tracker [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating resource usage from migration c0049d9e-3f16-4dab-89a1-5e74800f317c [ 1749.356848] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 1cb18f2a-6476-4492-8576-7b0fd693a107] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1749.426140] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00f07e0-269e-405b-8529-49e231c0a058 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.434295] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d692151-6682-4b10-a310-e073d73b08c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.469190] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024861dd-a355-470f-8916-0b5004e0a6c7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.478674] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37609805-70a0-4369-a629-18475ed0e27f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.492392] env[63371]: DEBUG nova.compute.provider_tree [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1749.564153] env[63371]: DEBUG oslo_vmware.api [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774668, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.596515] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1749.596684] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1749.596822] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1749.597013] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1749.597196] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1749.597311] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1749.597508] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1749.597663] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1749.597827] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 
tempest-ServerDiskConfigTestJSON-625964520-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1749.597982] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1749.598769] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1749.605458] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49132aa2-79cd-48c6-81fe-1b4bf531d2db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1749.618571] env[63371]: DEBUG nova.network.neutron [-] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1749.629037] env[63371]: INFO nova.compute.manager [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Took 24.15 seconds to build instance. [ 1749.629037] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1749.629037] env[63371]: value = "task-1774669" [ 1749.629037] env[63371]: _type = "Task" [ 1749.629037] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1749.641251] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774669, 'name': ReconfigVM_Task} progress is 6%. 
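The nova.virt.hardware lines above (and the earlier ones for m1.nano) show the CPU topology selection reduced to a search over sockets x cores x threads splits of the flavor's vcpus, bounded by the 65536 maxima, which for vcpus=1 leaves only 1:1:1. The brute-force enumeration below is an illustration in that spirit, not the Nova implementation (which also weighs flavor and image preferences).

import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    # Every (sockets, cores, threads) split whose product equals vcpus and
    # that stays within the given maxima.
    found = []
    for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
        if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads:
            found.append((s, c, t))
    return found

print(possible_topologies(1))  # [(1, 1, 1)], matching the logged result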
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.824173] env[63371]: DEBUG oslo_concurrency.lockutils [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1749.836531] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1749.836769] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1749.836935] env[63371]: DEBUG nova.network.neutron [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1749.860232] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: e05c7187-b4d6-481e-8bce-deb557dde6a8] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1749.995109] env[63371]: DEBUG nova.scheduler.client.report [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1750.026853] env[63371]: DEBUG nova.compute.manager [req-75eb71d5-eab2-42c6-ae01-5f493558fa4a req-5d39cc7b-1a09-4a24-acc9-81ccbd0bb2fe service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Received event network-changed-57835801-cbba-4176-8f6b-8d0ec76aa66e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1750.026853] env[63371]: DEBUG nova.compute.manager [req-75eb71d5-eab2-42c6-ae01-5f493558fa4a req-5d39cc7b-1a09-4a24-acc9-81ccbd0bb2fe service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Refreshing instance network info cache due to event network-changed-57835801-cbba-4176-8f6b-8d0ec76aa66e. 
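The provider inventory repeated in these report lines determines the capacity placement works with: for each resource class the usable amount is (total - reserved) * allocation_ratio, which for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 comes to 192 VCPU, 196078 MB of RAM and 400 GB of disk. A worked check of those numbers, using only the values from the log:

# Provider inventory as logged (min_unit/max_unit/step_size omitted).
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

# Effective capacity per resource class: (total - reserved) * allocation_ratio.
for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400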
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1750.027181] env[63371]: DEBUG oslo_concurrency.lockutils [req-75eb71d5-eab2-42c6-ae01-5f493558fa4a req-5d39cc7b-1a09-4a24-acc9-81ccbd0bb2fe service nova] Acquiring lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1750.027382] env[63371]: DEBUG oslo_concurrency.lockutils [req-75eb71d5-eab2-42c6-ae01-5f493558fa4a req-5d39cc7b-1a09-4a24-acc9-81ccbd0bb2fe service nova] Acquired lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1750.027624] env[63371]: DEBUG nova.network.neutron [req-75eb71d5-eab2-42c6-ae01-5f493558fa4a req-5d39cc7b-1a09-4a24-acc9-81ccbd0bb2fe service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Refreshing network info cache for port 57835801-cbba-4176-8f6b-8d0ec76aa66e {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1750.066344] env[63371]: DEBUG oslo_vmware.api [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774668, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.123281] env[63371]: INFO nova.compute.manager [-] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Took 1.28 seconds to deallocate network for instance. [ 1750.135022] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c0da5d37-4f0a-4eb7-bb8a-be1a58e975e1 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Lock "e16e4a55-4198-4308-b12c-d9ac07daecad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.670s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.140182] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774669, 'name': ReconfigVM_Task, 'duration_secs': 0.214005} completed successfully. 
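The lockutils lines throughout this section ("acquired ... waited 16.445s", "released ... held 3.407s", "held 25.670s") come from timing wrappers around named locks such as "compute_resources". The snippet below is a plain-Python illustration of that waited/held bookkeeping using threading, not the oslo_concurrency implementation.

import threading
import time
from contextlib import contextmanager

_locks = {}  # one shared lock object per name, as with named lockutils locks

@contextmanager
def timed_lock(name, holder):
    lock = _locks.setdefault(name, threading.Lock())
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{holder}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{holder}" :: held {held:.3f}s')


with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
    time.sleep(0.05)  # stand-in for the work done while the lock is held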
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.140662] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating instance '3a6c12a7-732f-4a73-a8c5-6810b554cc03' progress to 33 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1750.364178] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 704978f9-3b24-4a73-8f64-b8e3e9e94a04] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1750.500295] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.376s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.500532] env[63371]: INFO nova.compute.manager [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Migrating [ 1750.510541] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.196s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.511247] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.512885] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.768s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.513139] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.514929] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.901s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.516684] env[63371]: INFO nova.compute.claims [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1750.554082] env[63371]: INFO nova.scheduler.client.report [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Deleted allocations for instance c04edf6d-8a07-4776-be0f-b763fb3059d2 [ 1750.566242] env[63371]: INFO nova.scheduler.client.report [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Deleted allocations for instance 44cc8606-24f5-4f6b-b96f-3559c9c3f06e [ 1750.579790] env[63371]: DEBUG oslo_vmware.api [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774668, 'name': ReconfigVM_Task, 'duration_secs': 1.442133} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.584136] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1750.584136] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Reconfigured VM to attach interface {{(pid=63371) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1750.630967] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.647757] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1750.647992] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 
tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1750.648157] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1750.648334] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1750.649019] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1750.649019] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1750.649019] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1750.649019] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1750.649736] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1750.649955] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1750.650150] env[63371]: DEBUG nova.virt.hardware [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1750.658996] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] 
Reconfiguring VM instance instance-0000005a to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1750.659814] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b12d97c-6cbd-4727-81ad-307d361fabe1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.688473] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1750.688473] env[63371]: value = "task-1774670" [ 1750.688473] env[63371]: _type = "Task" [ 1750.688473] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1750.695471] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774670, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.760161] env[63371]: DEBUG nova.network.neutron [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Updating instance_info_cache with network_info: [{"id": "d3f41a80-52de-46a5-ac15-9a26e6710908", "address": "fa:16:3e:f6:cd:6b", "network": {"id": "9c25e5e9-468d-4d4c-93e0-c9815eff1c2e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-814005109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e693d73d70140c2ba065de2b60838c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3f41a80-52", "ovs_interfaceid": "d3f41a80-52de-46a5-ac15-9a26e6710908", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1750.785752] env[63371]: DEBUG oslo_concurrency.lockutils [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Acquiring lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.786864] env[63371]: DEBUG oslo_concurrency.lockutils [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.786964] env[63371]: DEBUG oslo_concurrency.lockutils [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Acquiring lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.787741] env[63371]: DEBUG oslo_concurrency.lockutils [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1750.787741] env[63371]: DEBUG oslo_concurrency.lockutils [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1750.792229] env[63371]: INFO nova.compute.manager [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Terminating instance [ 1750.795407] env[63371]: DEBUG nova.compute.manager [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1750.795741] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-75c0c975-49e9-487b-aacb-7d658e1c8e34 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.809799] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3846ac0f-9f82-4b07-af23-1d082db2001b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.846524] env[63371]: WARNING nova.virt.vmwareapi.driver [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance c03e2dc4-75d9-4fbb-afc8-046cbbf908ac could not be found. 
[ 1750.846630] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1750.847025] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2c54f64c-be45-4c91-9b12-e11778f09c5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.855955] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad8569f-5530-4091-9e6d-c40d34a70f4d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.867970] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: b880750e-7bf4-412c-bcff-eb2c343f60f0] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1750.898952] env[63371]: WARNING nova.virt.vmwareapi.vmops [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c03e2dc4-75d9-4fbb-afc8-046cbbf908ac could not be found. [ 1750.899180] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1750.899400] env[63371]: INFO nova.compute.manager [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Took 0.10 seconds to destroy the instance on the hypervisor. [ 1750.899608] env[63371]: DEBUG oslo.service.loopingcall [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1750.899850] env[63371]: DEBUG nova.compute.manager [-] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1750.899946] env[63371]: DEBUG nova.network.neutron [-] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1750.952110] env[63371]: DEBUG nova.network.neutron [req-75eb71d5-eab2-42c6-ae01-5f493558fa4a req-5d39cc7b-1a09-4a24-acc9-81ccbd0bb2fe service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updated VIF entry in instance network info cache for port 57835801-cbba-4176-8f6b-8d0ec76aa66e. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1750.952615] env[63371]: DEBUG nova.network.neutron [req-75eb71d5-eab2-42c6-ae01-5f493558fa4a req-5d39cc7b-1a09-4a24-acc9-81ccbd0bb2fe service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updating instance_info_cache with network_info: [{"id": "bc8b891d-040a-4a55-a281-311c08ae828d", "address": "fa:16:3e:ea:27:0c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8b891d-04", "ovs_interfaceid": "bc8b891d-040a-4a55-a281-311c08ae828d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "57835801-cbba-4176-8f6b-8d0ec76aa66e", "address": "fa:16:3e:29:75:d5", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57835801-cb", "ovs_interfaceid": "57835801-cbba-4176-8f6b-8d0ec76aa66e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1751.037461] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1751.038085] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1751.038160] env[63371]: DEBUG nova.network.neutron [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1751.068114] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93a3f11a-b1ab-4d0e-9e3b-f9abc1694069 tempest-ServersTestManualDisk-1370696618 tempest-ServersTestManualDisk-1370696618-project-member] Lock "c04edf6d-8a07-4776-be0f-b763fb3059d2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.004s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.084682] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dada7a7b-16e9-4203-b27f-783bbd3883e6 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "44cc8606-24f5-4f6b-b96f-3559c9c3f06e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.391s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.087203] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d29deec4-5c03-4bc8-847b-1f175689bad9 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-d00602b9-16bf-4c11-bc47-6076dddbf159-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.114s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.195921] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774670, 'name': ReconfigVM_Task, 'duration_secs': 0.206838} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.196189] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Reconfigured VM instance instance-0000005a to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1751.196980] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa7e30f-9a2b-46fd-a788-e2cd0c3eb312 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.223159] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 3a6c12a7-732f-4a73-a8c5-6810b554cc03/3a6c12a7-732f-4a73-a8c5-6810b554cc03.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1751.223502] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b532a9bd-f427-48a2-b0e1-043e0c2166f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.244205] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1751.244205] env[63371]: value = "task-1774671" [ 1751.244205] env[63371]: _type = "Task" [ 1751.244205] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.254295] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774671, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.265014] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Releasing lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.290926] env[63371]: DEBUG nova.virt.hardware [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='372bbfd547ba9e44c310570fa2e7b3ae',container_format='bare',created_at=2024-12-11T21:37:22Z,direct_url=,disk_format='vmdk',id=496b93e2-5142-43b5-a0fc-8e75cb31f472,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-809566942-shelved',owner='2e693d73d70140c2ba065de2b60838c2',properties=ImageMetaProps,protected=,size=31664640,status='active',tags=,updated_at=2024-12-11T21:37:38Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1751.290926] env[63371]: DEBUG nova.virt.hardware [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1751.291124] env[63371]: DEBUG nova.virt.hardware [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1751.291310] env[63371]: DEBUG nova.virt.hardware [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1751.291601] env[63371]: DEBUG nova.virt.hardware [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1751.291906] env[63371]: DEBUG nova.virt.hardware [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1751.292196] env[63371]: DEBUG nova.virt.hardware [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1751.292401] env[63371]: DEBUG nova.virt.hardware [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1751.292651] env[63371]: DEBUG nova.virt.hardware [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1751.292861] env[63371]: DEBUG nova.virt.hardware [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1751.293084] env[63371]: DEBUG nova.virt.hardware [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1751.294019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700c5852-2da8-406e-ae1a-381e780cc542 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.304193] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a8a076-ff00-4531-ab82-5283719a216d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.319350] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:cd:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7514a465-f1a4-4a8b-b76b-726b1a9d7e2f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd3f41a80-52de-46a5-ac15-9a26e6710908', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1751.327958] env[63371]: DEBUG oslo.service.loopingcall [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1751.327958] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1751.328214] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-002d16fc-c074-4d8a-ad4c-e963e661b11a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.351226] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1751.351226] env[63371]: value = "task-1774672" [ 1751.351226] env[63371]: _type = "Task" [ 1751.351226] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.359935] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774672, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.376110] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: d6bc618e-33c9-4b45-b79f-afe6811acd4e] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1751.455516] env[63371]: DEBUG oslo_concurrency.lockutils [req-75eb71d5-eab2-42c6-ae01-5f493558fa4a req-5d39cc7b-1a09-4a24-acc9-81ccbd0bb2fe service nova] Releasing lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1751.455828] env[63371]: DEBUG nova.compute.manager [req-75eb71d5-eab2-42c6-ae01-5f493558fa4a req-5d39cc7b-1a09-4a24-acc9-81ccbd0bb2fe service nova] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Received event network-vif-deleted-2860b658-ff36-48a0-b36c-81ae2f4a6c16 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1751.759577] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774671, 'name': ReconfigVM_Task, 'duration_secs': 0.425042} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1751.762243] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 3a6c12a7-732f-4a73-a8c5-6810b554cc03/3a6c12a7-732f-4a73-a8c5-6810b554cc03.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1751.763147] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating instance '3a6c12a7-732f-4a73-a8c5-6810b554cc03' progress to 50 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1751.864251] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774672, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.876100] env[63371]: INFO nova.compute.manager [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Rebuilding instance [ 1751.880429] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 1c93487b-6d8f-424d-8b95-10bfb894c609] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1751.906909] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d54efa4-6aca-4315-a7f4-f30618558798 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.919824] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-411af922-5ddf-42ff-8d0f-81d810b6b185 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.959433] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fa81a7-0fdf-4845-9b58-2d1d00858a36 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.962327] env[63371]: DEBUG nova.compute.manager [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1751.963447] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3492ee25-cdeb-48cf-9816-96a871a63af6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.975301] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e56c5fa-5f76-4757-ab49-be39e5d945f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.989850] env[63371]: DEBUG 
nova.compute.provider_tree [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1752.028901] env[63371]: DEBUG nova.network.neutron [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance_info_cache with network_info: [{"id": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "address": "fa:16:3e:50:09:23", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82aece5e-dc", "ovs_interfaceid": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.185406] env[63371]: DEBUG nova.network.neutron [-] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.274896] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4082c28b-e7fd-4b87-852b-8bd7a0c00ada {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.315911] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58f668a-b553-478b-a1bd-782208ed2edf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.337818] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating instance '3a6c12a7-732f-4a73-a8c5-6810b554cc03' progress to 67 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1752.343843] env[63371]: DEBUG nova.compute.manager [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Received event network-vif-plugged-d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11132}} [ 1752.344095] env[63371]: DEBUG oslo_concurrency.lockutils [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] Acquiring lock "9985dbcd-4498-4629-aae5-5e1933307c50-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.344303] env[63371]: DEBUG oslo_concurrency.lockutils [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] Lock "9985dbcd-4498-4629-aae5-5e1933307c50-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1752.344462] env[63371]: DEBUG oslo_concurrency.lockutils [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] Lock "9985dbcd-4498-4629-aae5-5e1933307c50-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1752.344619] env[63371]: DEBUG nova.compute.manager [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] No waiting events found dispatching network-vif-plugged-d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1752.344773] env[63371]: WARNING nova.compute.manager [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Received unexpected event network-vif-plugged-d3f41a80-52de-46a5-ac15-9a26e6710908 for instance with vm_state shelved_offloaded and task_state spawning. [ 1752.344936] env[63371]: DEBUG nova.compute.manager [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Received event network-changed-d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1752.346056] env[63371]: DEBUG nova.compute.manager [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Refreshing instance network info cache due to event network-changed-d3f41a80-52de-46a5-ac15-9a26e6710908. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1752.346056] env[63371]: DEBUG oslo_concurrency.lockutils [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] Acquiring lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.346056] env[63371]: DEBUG oslo_concurrency.lockutils [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] Acquired lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.346056] env[63371]: DEBUG nova.network.neutron [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Refreshing network info cache for port d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1752.365991] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774672, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.386166] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 150b1a8d-b8d6-4ebc-a4a3-be8bba6860ed] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1752.480750] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1752.481183] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17efde0a-2ea7-4cc1-a073-e2ef12596360 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.490311] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1752.490311] env[63371]: value = "task-1774673" [ 1752.490311] env[63371]: _type = "Task" [ 1752.490311] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.495032] env[63371]: DEBUG nova.scheduler.client.report [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1752.511787] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774673, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.531205] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.688805] env[63371]: INFO nova.compute.manager [-] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Took 1.79 seconds to deallocate network for instance. [ 1752.871511] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774672, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.888552] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 574121c4-c721-4d30-81ec-3f2310a7b6d1] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1752.999022] env[63371]: DEBUG nova.network.neutron [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Port e144cd6b-c3f5-496e-99c6-19e9ab58c042 binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1753.006148] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774673, 'name': PowerOffVM_Task, 'duration_secs': 0.148879} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.006785] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1753.007306] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1753.008638] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.494s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.009310] env[63371]: DEBUG nova.compute.manager [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1753.014581] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f62659-f48f-444b-962c-c37c8a742134 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.018841] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 12.974s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.019251] env[63371]: DEBUG nova.objects.instance [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 158259a4-f54a-4192-b235-f03838193516] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63371) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1753.028993] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1753.029604] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-252d8a8a-524b-4424-a20f-fe98102883fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.057611] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] 
[instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1753.057860] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1753.058185] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Deleting the datastore file [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1753.058630] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c9bac02-b1cb-458f-9a6d-a00343eb6d7c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.066775] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1753.066775] env[63371]: value = "task-1774675" [ 1753.066775] env[63371]: _type = "Task" [ 1753.066775] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.082116] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774675, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.254442] env[63371]: INFO nova.compute.manager [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Took 0.57 seconds to detach 1 volumes for instance. 
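The oslo_vmware.api entries throughout this trace ("Waiting for the task ... to complete", "progress is N%.", "Task ... completed successfully" with a duration_secs) all reflect one poll-until-done pattern around long-running vCenter tasks. Below is a minimal, generic sketch of such a polling loop; the TaskInfo shape, the state names, the fetch_task_info callable, and the poll interval are assumptions made for illustration and are not the oslo.vmware API.

import time
from dataclasses import dataclass
from typing import Callable


@dataclass
class TaskInfo:
    """Snapshot of a long-running task, mirroring the fields seen in the log."""
    task_id: str
    name: str
    state: str              # e.g. "running", "success", "error"
    progress: int           # percent complete
    error: str | None = None


def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5) -> TaskInfo:
    """Poll a task until it finishes, reporting progress like the trace above.

    fetch_task_info is a hypothetical callable returning the current TaskInfo
    (in a real driver this would be a property read against vCenter); it is an
    assumption made for this sketch.
    """
    start = time.monotonic()
    while True:
        info = fetch_task_info()
        if info.state == "success":
            duration = time.monotonic() - start
            print(f"Task: {{'id': {info.task_id!r}, 'name': {info.name!r}, "
                  f"'duration_secs': {duration:.6f}}} completed successfully.")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {info.task_id} failed: {info.error}")
        print(f"Task: {{'id': {info.task_id!r}, 'name': {info.name!r}}} "
              f"progress is {info.progress}%.")
        time.sleep(poll_interval)

Later in the trace the same pattern is visible for task-1774672 (CreateVM_Task), which reports 25% several times before completing with duration_secs of roughly 2.37.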
[ 1753.256709] env[63371]: DEBUG nova.compute.manager [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Deleting volume: 32d41ea7-8d37-4108-a5fd-9dd5e6d351de {{(pid=63371) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 1753.266780] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "7349ecf6-2de7-4540-b713-7e29cbd3ff0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.266898] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "7349ecf6-2de7-4540-b713-7e29cbd3ff0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.267142] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "7349ecf6-2de7-4540-b713-7e29cbd3ff0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.267222] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "7349ecf6-2de7-4540-b713-7e29cbd3ff0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.267401] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "7349ecf6-2de7-4540-b713-7e29cbd3ff0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.273667] env[63371]: INFO nova.compute.manager [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Terminating instance [ 1753.280974] env[63371]: DEBUG nova.compute.manager [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1753.281255] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1753.282957] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0c6349-3be2-4988-954e-cc1c31d576bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.297199] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1753.297199] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-12b06b4a-c98d-4b58-a0d2-ad337e68ccbd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.303334] env[63371]: DEBUG oslo_vmware.api [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1753.303334] env[63371]: value = "task-1774676" [ 1753.303334] env[63371]: _type = "Task" [ 1753.303334] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.159874] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 0e2c8ced-198f-43be-9d41-703a7c590df4] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1754.168499] env[63371]: DEBUG nova.compute.utils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1754.178963] env[63371]: DEBUG oslo_vmware.api [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774676, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.179290] env[63371]: WARNING oslo_vmware.common.loopingcall [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] task run outlasted interval by 0.37557700000000005 sec [ 1754.194062] env[63371]: DEBUG nova.compute.manager [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1754.194199] env[63371]: DEBUG nova.network.neutron [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1754.230123] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774672, 'name': CreateVM_Task, 'duration_secs': 2.372092} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.234909] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1754.235555] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150714} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.241437] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/496b93e2-5142-43b5-a0fc-8e75cb31f472" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.241744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired lock "[datastore1] devstack-image-cache_base/496b93e2-5142-43b5-a0fc-8e75cb31f472" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.245266] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/496b93e2-5142-43b5-a0fc-8e75cb31f472" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1754.245266] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1754.245266] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1754.245266] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Instance destroyed {{(pid=63371) destroy 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1754.248322] env[63371]: DEBUG oslo_vmware.api [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774676, 'name': PowerOffVM_Task, 'duration_secs': 0.251437} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.248797] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01fec2d7-c9b9-4854-bd8b-87604b767752 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.252141] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1754.252409] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1754.253469] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e826d672-c0d5-49db-a6be-457f91174c78 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.259692] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1754.259692] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f72ae4-44f8-2552-0690-543a95656366" [ 1754.259692] env[63371]: _type = "Task" [ 1754.259692] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.282256] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Releasing lock "[datastore1] devstack-image-cache_base/496b93e2-5142-43b5-a0fc-8e75cb31f472" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.282858] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Processing image 496b93e2-5142-43b5-a0fc-8e75cb31f472 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1754.283213] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/496b93e2-5142-43b5-a0fc-8e75cb31f472/496b93e2-5142-43b5-a0fc-8e75cb31f472.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1754.283392] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired lock "[datastore1] devstack-image-cache_base/496b93e2-5142-43b5-a0fc-8e75cb31f472/496b93e2-5142-43b5-a0fc-8e75cb31f472.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1754.283604] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1754.284846] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71d3352c-450c-4e33-9829-0b4f9d35b393 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.309201] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1754.309201] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1754.309201] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e92cd7ff-c37e-4d95-8a34-74b1af9ea4c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.315680] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1754.315680] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522c2a4c-ba36-7618-9f9d-a3a2966bdf9f" [ 1754.315680] env[63371]: _type = "Task" [ 1754.315680] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.317900] env[63371]: DEBUG nova.network.neutron [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Updated VIF entry in instance network info cache for port d3f41a80-52de-46a5-ac15-9a26e6710908. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1754.317900] env[63371]: DEBUG nova.network.neutron [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Updating instance_info_cache with network_info: [{"id": "d3f41a80-52de-46a5-ac15-9a26e6710908", "address": "fa:16:3e:f6:cd:6b", "network": {"id": "9c25e5e9-468d-4d4c-93e0-c9815eff1c2e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-814005109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e693d73d70140c2ba065de2b60838c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3f41a80-52", "ovs_interfaceid": "d3f41a80-52de-46a5-ac15-9a26e6710908", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.324955] env[63371]: DEBUG nova.policy [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25f28e53648c41d1a147c1aa04f0a708', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fb0da840f6847f19f03a1db8a1c3f4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} 
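Annotation: the recurring 'Acquiring lock ... by ...', 'acquired ... :: waited' and '"released" ... :: held' records throughout this trace are emitted by oslo.concurrency's lockutils wrappers. A simplified sketch of the two forms involved follows; the lock name 'compute_resources' and the image-cache path are copied from the log, while the function body and the plain lock() call are stand-ins and do not reproduce Nova's actual call sites (which may also take an external, inter-process file lock, visible above as "Acquired external semaphore").

    from oslo_concurrency import lockutils

    # Decorator form: logs "Acquiring lock ... by ...", then "acquired ... ::
    # waited ...s", and "released ... :: held ...s" once the call returns.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass  # critical section; the held time is reported on release

    update_usage()

    # Context-manager form, used with ad-hoc names such as the per-image
    # cache path seen above.
    with lockutils.lock(
            '[datastore1] devstack-image-cache_base/'
            '496b93e2-5142-43b5-a0fc-8e75cb31f472'):
        pass  # fetch or reuse the cached image while the name is held
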
[ 1754.331553] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522c2a4c-ba36-7618-9f9d-a3a2966bdf9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.409247] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1754.409247] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1754.409247] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Deleting the datastore file [datastore1] 7349ecf6-2de7-4540-b713-7e29cbd3ff0b {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1754.409247] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b0e17d6a-1e41-421c-ab91-d348a324f1ab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.415025] env[63371]: DEBUG oslo_vmware.api [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1754.415025] env[63371]: value = "task-1774679" [ 1754.415025] env[63371]: _type = "Task" [ 1754.415025] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.424060] env[63371]: DEBUG oslo_vmware.api [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774679, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.683618] env[63371]: DEBUG nova.compute.manager [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1754.691147] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 195de525-1081-4db6-acf3-04a6d3eb142f] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1754.707129] env[63371]: DEBUG oslo_concurrency.lockutils [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.731893] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "interface-d00602b9-16bf-4c11-bc47-6076dddbf159-4590c30b-effd-423f-b0b2-c208bbdfffd7" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.732156] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-d00602b9-16bf-4c11-bc47-6076dddbf159-4590c30b-effd-423f-b0b2-c208bbdfffd7" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.732491] env[63371]: DEBUG nova.objects.instance [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'flavor' on Instance uuid d00602b9-16bf-4c11-bc47-6076dddbf159 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1754.742849] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.742969] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.743171] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1754.770274] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e723272a-0ee5-4d44-8df9-0cdd47c6d79a tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.752s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.774757] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.115s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.779127] env[63371]: INFO nova.compute.claims [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1754.788918] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf12838-fa78-4d4d-b425-196b1ca46e9f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.828184] env[63371]: DEBUG oslo_concurrency.lockutils [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] Releasing lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.828298] env[63371]: DEBUG nova.compute.manager [req-9a16a683-5ef3-42fa-8d84-fedd9e5f7277 req-cb06dfca-060b-44f0-97d0-cb22c737dc96 service nova] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Received event network-vif-deleted-ca5ead57-035d-446f-8117-2c2374008be8 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1754.829264] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance '9862b0f0-ccf6-4e69-9e78-cf864adaa65e' progress to 0 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1754.849220] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Preparing fetch location {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1754.849220] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Fetch image to [datastore1] OSTACK_IMG_d661717f-78b3-4f8f-b283-c22e3b501597/OSTACK_IMG_d661717f-78b3-4f8f-b283-c22e3b501597.vmdk {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1754.849220] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 
tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Downloading stream optimized image 496b93e2-5142-43b5-a0fc-8e75cb31f472 to [datastore1] OSTACK_IMG_d661717f-78b3-4f8f-b283-c22e3b501597/OSTACK_IMG_d661717f-78b3-4f8f-b283-c22e3b501597.vmdk on the data store datastore1 as vApp {{(pid=63371) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1754.849342] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Downloading image file data 496b93e2-5142-43b5-a0fc-8e75cb31f472 to the ESX as VM named 'OSTACK_IMG_d661717f-78b3-4f8f-b283-c22e3b501597' {{(pid=63371) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1754.870357] env[63371]: DEBUG nova.network.neutron [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Successfully created port: c6418174-b2f5-4848-bc28-4fc4fc2fb439 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1754.924560] env[63371]: DEBUG oslo_vmware.api [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774679, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.419205} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.926675] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1754.926874] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1754.927099] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1754.927239] env[63371]: INFO nova.compute.manager [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1754.927483] env[63371]: DEBUG oslo.service.loopingcall [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1754.927917] env[63371]: DEBUG nova.compute.manager [-] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1754.928024] env[63371]: DEBUG nova.network.neutron [-] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1754.957243] env[63371]: DEBUG oslo_vmware.rw_handles [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1754.957243] env[63371]: value = "resgroup-9" [ 1754.957243] env[63371]: _type = "ResourcePool" [ 1754.957243] env[63371]: }. {{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1754.958246] env[63371]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-fe565ebe-251a-4bd5-87f0-68252f3d55d4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.980527] env[63371]: DEBUG oslo_vmware.rw_handles [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lease: (returnval){ [ 1754.980527] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52590a17-f0b6-b6a2-c39a-209e8dc48cb6" [ 1754.980527] env[63371]: _type = "HttpNfcLease" [ 1754.980527] env[63371]: } obtained for vApp import into resource pool (val){ [ 1754.980527] env[63371]: value = "resgroup-9" [ 1754.980527] env[63371]: _type = "ResourcePool" [ 1754.980527] env[63371]: }. {{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1754.980527] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the lease: (returnval){ [ 1754.980527] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52590a17-f0b6-b6a2-c39a-209e8dc48cb6" [ 1754.980527] env[63371]: _type = "HttpNfcLease" [ 1754.980527] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1754.989810] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1754.989810] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52590a17-f0b6-b6a2-c39a-209e8dc48cb6" [ 1754.989810] env[63371]: _type = "HttpNfcLease" [ 1754.989810] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1755.196593] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 96627f7d-5f59-46a1-bcdd-a5d3aec1ac7f] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1755.305446] env[63371]: DEBUG nova.virt.hardware [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1755.306930] env[63371]: DEBUG nova.virt.hardware [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1755.307222] env[63371]: DEBUG nova.virt.hardware [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1755.307444] env[63371]: DEBUG nova.virt.hardware [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1755.307674] env[63371]: DEBUG nova.virt.hardware [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1755.308127] env[63371]: DEBUG nova.virt.hardware [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1755.308175] env[63371]: DEBUG nova.virt.hardware [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1755.308384] env[63371]: DEBUG nova.virt.hardware [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 
tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1755.308851] env[63371]: DEBUG nova.virt.hardware [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1755.309247] env[63371]: DEBUG nova.virt.hardware [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1755.309535] env[63371]: DEBUG nova.virt.hardware [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1755.312115] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4addd81-1e32-4966-a1d4-656882894d09 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.334379] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ec6880f-52dd-414c-ba1b-2276b0ee469a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.343448] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1755.344062] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95c04580-8383-439b-a636-4f4256e1c100 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.363342] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1755.370031] env[63371]: DEBUG oslo.service.loopingcall [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1755.372357] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1755.373197] env[63371]: DEBUG oslo_vmware.api [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1755.373197] env[63371]: value = "task-1774681" [ 1755.373197] env[63371]: _type = "Task" [ 1755.373197] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.373384] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f998b9b6-2f1c-4dec-842d-39aa02f00a30 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.398283] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1755.398495] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance '9862b0f0-ccf6-4e69-9e78-cf864adaa65e' progress to 17 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1755.402745] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1755.402745] env[63371]: value = "task-1774682" [ 1755.402745] env[63371]: _type = "Task" [ 1755.402745] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.413134] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774682, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.489822] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1755.489822] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52590a17-f0b6-b6a2-c39a-209e8dc48cb6" [ 1755.489822] env[63371]: _type = "HttpNfcLease" [ 1755.489822] env[63371]: } is initializing. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1755.559353] env[63371]: DEBUG nova.objects.instance [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'pci_requests' on Instance uuid d00602b9-16bf-4c11-bc47-6076dddbf159 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1755.705209] env[63371]: DEBUG nova.compute.manager [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1755.705209] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: f8119ade-7018-4ad8-82fe-baa0a6753c64] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1755.748151] env[63371]: DEBUG nova.virt.hardware [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1755.748428] env[63371]: DEBUG nova.virt.hardware [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1755.748998] env[63371]: DEBUG nova.virt.hardware [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1755.748998] env[63371]: DEBUG nova.virt.hardware [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1755.748998] env[63371]: DEBUG nova.virt.hardware [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1755.749363] env[63371]: DEBUG nova.virt.hardware [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1755.749719] env[63371]: DEBUG nova.virt.hardware [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1755.749894] env[63371]: DEBUG nova.virt.hardware [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 
tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1755.750130] env[63371]: DEBUG nova.virt.hardware [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1755.750347] env[63371]: DEBUG nova.virt.hardware [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1755.750651] env[63371]: DEBUG nova.virt.hardware [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1755.751559] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38451fc1-6326-4dc3-b446-851052ad5a23 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.765681] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a1bdccd-da7d-4ccb-9d1b-7e923b77abbb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.848042] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1755.848382] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.848726] env[63371]: DEBUG nova.network.neutron [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1755.870728] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "da4839fa-8597-411c-b30c-0ac9226fec1f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.870995] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 
tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.905488] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1755.905748] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1755.905913] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1755.906932] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1755.907432] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1755.907647] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1755.907905] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1755.908112] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1755.908331] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1755.910174] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1755.910174] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1755.919334] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0a85a4db-a78c-48f4-8941-2558d26ba9f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.943058] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774682, 'name': CreateVM_Task, 'duration_secs': 0.309806} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.944465] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1755.944803] env[63371]: DEBUG oslo_vmware.api [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1755.944803] env[63371]: value = "task-1774683" [ 1755.944803] env[63371]: _type = "Task" [ 1755.944803] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.948188] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1755.948188] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.948188] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1755.948538] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c6831ca-7cbc-4d65-a054-ee7ccd9d4dc3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.958608] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1755.958608] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524ed6f6-1701-d6fb-e429-0c96948d3bd3" [ 1755.958608] env[63371]: _type = "Task" [ 1755.958608] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.971412] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524ed6f6-1701-d6fb-e429-0c96948d3bd3, 'name': SearchDatastore_Task, 'duration_secs': 0.010305} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.971829] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1755.971829] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1755.972909] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1755.973084] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1755.973337] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1755.973798] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3b1cb18-d389-4a8b-b950-3e7657f51425 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.982352] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1755.982644] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Folder [datastore1] devstack-image-cache_base created. 
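Note: the ServersAdmin275Test request above serializes on a lock named after the cached image path, searches the datastore for it, and creates the devstack-image-cache_base folder before copying the VMDK. A minimal sketch of that lock-around-fetch-if-missing pattern with oslo.concurrency; the cache root, lock name and fetch callback are illustrative stand-ins, not the driver's actual code (which lives in _fetch_image_if_missing, as the log locations show):

    import os

    from oslo_concurrency import lockutils

    CACHE_ROOT = "/tmp/devstack-image-cache_base"  # stand-in for "[datastore1] devstack-image-cache_base"

    def ensure_cached(image_id, fetch):
        """Fetch an image into the cache exactly once, even with concurrent requests."""
        path = os.path.join(CACHE_ROOT, image_id, "%s.vmdk" % image_id)
        # Same idea as the 'Acquiring/Acquired lock "[datastore1] devstack-image-cache_base/..."'
        # entries above: all requests for one image serialize on a per-image lock name.
        with lockutils.lock("image-cache-%s" % image_id):
            if not os.path.exists(path):
                os.makedirs(os.path.dirname(path), exist_ok=True)  # "Creating directory with path ..."
                fetch(path)                                         # download or copy the image VMDK
        return path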
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1755.986507] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5de36c1d-ad8a-4f54-bbec-97be09d1d398 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.991950] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1755.991950] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d2b6c5-678d-f51e-19e1-9f2909d05a06" [ 1755.991950] env[63371]: _type = "Task" [ 1755.991950] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1755.994239] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1755.994239] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52590a17-f0b6-b6a2-c39a-209e8dc48cb6" [ 1755.994239] env[63371]: _type = "HttpNfcLease" [ 1755.994239] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1755.997369] env[63371]: DEBUG oslo_vmware.rw_handles [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1755.997369] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52590a17-f0b6-b6a2-c39a-209e8dc48cb6" [ 1755.997369] env[63371]: _type = "HttpNfcLease" [ 1755.997369] env[63371]: }. {{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1756.000468] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb1cd48-248f-48ca-affa-99a74a2c2183 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.010904] env[63371]: DEBUG nova.network.neutron [-] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1756.012456] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d2b6c5-678d-f51e-19e1-9f2909d05a06, 'name': SearchDatastore_Task, 'duration_secs': 0.00979} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.015843] env[63371]: DEBUG oslo_vmware.rw_handles [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52145247-ec04-ea36-dfbd-2438af2d210d/disk-0.vmdk from lease info. 
{{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1756.016025] env[63371]: DEBUG oslo_vmware.rw_handles [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Creating HTTP connection to write to file with size = 31664640 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52145247-ec04-ea36-dfbd-2438af2d210d/disk-0.vmdk. {{(pid=63371) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1756.020055] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a8e2727-9b07-4e56-bcf3-f63e9b837ed6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.081547] env[63371]: DEBUG nova.objects.base [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1756.081760] env[63371]: DEBUG nova.network.neutron [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1756.089181] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1756.089181] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522421c1-12d7-c782-8e5d-3b58611bc27b" [ 1756.089181] env[63371]: _type = "Task" [ 1756.089181] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.097415] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9701207f-c542-46ed-943a-4e630b2d540d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.105517] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522421c1-12d7-c782-8e5d-3b58611bc27b, 'name': SearchDatastore_Task, 'duration_secs': 0.028511} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1756.107554] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1756.107554] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad/e16e4a55-4198-4308-b12c-d9ac07daecad.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1756.112021] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-179e25fc-0303-4713-84a7-478056e9b515 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.117613] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1756.117613] env[63371]: value = "task-1774684" [ 1756.117613] env[63371]: _type = "Task" [ 1756.117613] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1756.136537] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774684, 'name': CopyVirtualDisk_Task} progress is 0%. 
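Note: CopyVirtualDisk_Task above is started and then polled until it finishes (a later entry reports duration_secs 0.729245). oslo.vmware wraps this in the session's wait_for_task() helper, as the api.py:397/434 locations in these entries indicate; a simplified stand-alone polling loop that captures the same idea, with get_task_info as a placeholder for the actual property-collector read:

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, interval=0.5):
        """Poll a vCenter-style task until it reports success or error.

        get_task_info is a callable returning an object with .state, .progress
        and .error attributes, mirroring the TaskInfo the log entries quote.
        """
        while True:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                raise TaskFailed(info.error)
            # corresponds to the "... progress is 25%" style entries in this log
            print("progress is %s%%" % (info.progress or 0))
            time.sleep(interval)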
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.206552] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 594ff846-8e3e-4882-8ddc-41f824a77a5c] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1756.225472] env[63371]: DEBUG nova.policy [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aacd81490704110b6cc6aba338883a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a5897667b6b47deb7ff5b64f9499f36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1756.256529] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6f47f6-e00b-44b1-b461-fc8804a4e142 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.264505] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdfe6a1-a874-466c-b287-9cc5413b0026 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.300096] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175d0542-5053-4361-8d3a-32e7d37a453e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.309127] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc6ba645-9db8-4980-a0ad-e7edb0b668ab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.323966] env[63371]: DEBUG nova.compute.provider_tree [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1756.374935] env[63371]: DEBUG nova.compute.manager [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1756.464933] env[63371]: DEBUG oslo_vmware.api [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774683, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.516759] env[63371]: INFO nova.compute.manager [-] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Took 1.59 seconds to deallocate network for instance. 
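Note: the "Policy check for network:attach_external_network failed" entry above is nova.policy evaluating an oslo.policy rule against the request's credentials: the member/reader roles in the credential dict do not satisfy the admin-only default, so external networks are simply left out of the candidates for this allocation. A hedged sketch of the same check made directly with oslo.policy (the rule name matches the log; the check string and credentials here are illustrative):

    from oslo_config import cfg
    from oslo_policy import policy

    conf = cfg.ConfigOpts()
    conf([], project="nova")                 # no config files are needed for this sketch
    enforcer = policy.Enforcer(conf)
    enforcer.register_defaults([
        # Admin-only default, written explicitly here for illustration.
        policy.RuleDefault("network:attach_external_network", "role:admin"),
    ])

    creds = {"roles": ["member", "reader"],
             "project_id": "5a5897667b6b47deb7ff5b64f9499f36"}
    print(enforcer.enforce("network:attach_external_network", {}, creds))   # False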
[ 1756.584098] env[63371]: DEBUG nova.compute.manager [req-f15ffa0a-0ef1-44eb-8949-239117ceaeb9 req-1c31b5c9-99e1-4803-98e1-e86efd6e8c6f service nova] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Received event network-vif-deleted-adaefbec-4084-4f4d-8db6-b7f5ff8df5ea {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1756.629148] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774684, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1756.714031] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: e6cd62ce-f6d2-4e5b-acbc-7527a94e0932] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1756.830845] env[63371]: DEBUG nova.scheduler.client.report [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1756.851276] env[63371]: DEBUG nova.network.neutron [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating instance_info_cache with network_info: [{"id": "e144cd6b-c3f5-496e-99c6-19e9ab58c042", "address": "fa:16:3e:99:d0:57", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape144cd6b-c3", "ovs_interfaceid": "e144cd6b-c3f5-496e-99c6-19e9ab58c042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1756.908390] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring 
lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.960705] env[63371]: DEBUG oslo_vmware.api [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774683, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.024053] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.131235] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774684, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.729245} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.133126] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad/e16e4a55-4198-4308-b12c-d9ac07daecad.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1757.133756] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1757.133756] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81e5f918-5ad1-4afa-88c8-40df7693ad2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.141799] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1757.141799] env[63371]: value = "task-1774685" [ 1757.141799] env[63371]: _type = "Task" [ 1757.141799] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.154296] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774685, 'name': ExtendVirtualDisk_Task} progress is 0%. 
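Note: the "Extending root virtual disk to 1048576" entry above is the driver resizing the copied root disk to the flavor size; 1048576 is 1 GiB expressed in KiB, i.e. a root_gb=1 flavor (the m1.nano definition dumped further down in this log shows the same root_gb=1). As a quick arithmetic check:

    root_gb = 1                                  # flavor root disk in GiB
    requested_size_kb = root_gb * 1024 * 1024    # size the extend task is asked for, in KiB
    assert requested_size_kb == 1048576          # the figure in the log entry above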
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.214402] env[63371]: DEBUG nova.network.neutron [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Successfully updated port: c6418174-b2f5-4848-bc28-4fc4fc2fb439 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1757.215614] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 7e66011a-4fed-471f-82ea-e1016f92ad39] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1757.336279] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.561s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.337232] env[63371]: DEBUG nova.compute.manager [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1757.342186] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.144s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.342483] env[63371]: DEBUG nova.objects.instance [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lazy-loading 'resources' on Instance uuid 158259a4-f54a-4192-b235-f03838193516 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1757.360585] env[63371]: DEBUG oslo_concurrency.lockutils [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1757.460978] env[63371]: DEBUG oslo_vmware.api [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774683, 'name': ReconfigVM_Task, 'duration_secs': 1.20037} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.461840] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance '9862b0f0-ccf6-4e69-9e78-cf864adaa65e' progress to 33 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1757.660465] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774685, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073284} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1757.661986] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1757.662947] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5b4100-35bc-408c-921b-caf497bddb0b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.691379] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad/e16e4a55-4198-4308-b12c-d9ac07daecad.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1757.697957] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57aa6059-3fac-461d-92cd-50bdcbfbb9d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.719662] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "refresh_cache-0c8c6997-bec8-4a3b-80cf-cbf35f3843f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.719662] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "refresh_cache-0c8c6997-bec8-4a3b-80cf-cbf35f3843f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.719662] env[63371]: DEBUG nova.network.neutron [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1757.720437] env[63371]: DEBUG nova.compute.manager [None 
req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: aae72990-b3ab-4a9c-a4fe-73dc7c5e59bf] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1757.724576] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1757.724576] env[63371]: value = "task-1774686" [ 1757.724576] env[63371]: _type = "Task" [ 1757.724576] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1757.736648] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774686, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1757.744681] env[63371]: DEBUG oslo_vmware.rw_handles [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Completed reading data from the image iterator. {{(pid=63371) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1757.744929] env[63371]: DEBUG oslo_vmware.rw_handles [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52145247-ec04-ea36-dfbd-2438af2d210d/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1757.745890] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc81d72a-bd03-4aba-8452-5a7698fa4b85 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.754169] env[63371]: DEBUG oslo_vmware.rw_handles [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52145247-ec04-ea36-dfbd-2438af2d210d/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1757.754169] env[63371]: DEBUG oslo_vmware.rw_handles [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52145247-ec04-ea36-dfbd-2438af2d210d/disk-0.vmdk. 
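Note: the ServersNegativeTestJSON request above has finished streaming image data to the VMDK URL obtained from an HttpNfcLease, confirms the lease is still in the ready state, and releases it (the HttpNfcLeaseComplete call follows just below). Stripped of oslo.vmware's read/write handles, the client side of that exchange is essentially an authenticated HTTP upload to the lease URL followed by the completion call; a rough requests-based sketch, with the completion callback as a placeholder for the SOAP invocation:

    import requests

    def upload_vmdk(lease_url, image_path, complete_lease):
        """Stream a local image file to the HttpNfcLease disk URL, then complete the lease.

        lease_url      -- the disk-0.vmdk URL from the lease info (see the entries above)
        image_path     -- local path to the image data (31664640 bytes in this run)
        complete_lease -- callable that invokes HttpNfcLease.HttpNfcLeaseComplete via the SOAP API
        """
        headers = {"Content-Type": "application/octet-stream"}
        with open(image_path, "rb") as src:
            resp = requests.put(lease_url, data=src, headers=headers, verify=False)
        resp.raise_for_status()
        complete_lease()   # mirrors "Invoking HttpNfcLease.HttpNfcLeaseComplete" below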
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1757.755830] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-05e18cb0-a28b-4080-87d7-1c435f34bf99 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.845727] env[63371]: DEBUG nova.compute.utils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1757.851094] env[63371]: DEBUG nova.compute.manager [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1757.851094] env[63371]: DEBUG nova.network.neutron [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1757.899584] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d557adb3-7533-4b75-9079-356e3b03373e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.927742] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480067e4-78de-4814-845d-dedf6c766c97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.936535] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating instance '3a6c12a7-732f-4a73-a8c5-6810b554cc03' progress to 83 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1757.941373] env[63371]: DEBUG nova.policy [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5f8b33e8e7bf484c96b03b4db8916359', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f816d2e4eddd479a9dcc827a7828d119', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1757.973209] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1757.973462] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1757.974429] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1757.974429] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1757.974429] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1757.974429] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1757.974429] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1757.974429] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1757.974749] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1757.974749] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1757.975069] env[63371]: DEBUG nova.virt.hardware [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1757.980391] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Reconfiguring VM instance instance-0000003d to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1757.983756] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-23c9f6c1-8b53-4763-ae64-32e5a8846d60 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.002730] env[63371]: DEBUG oslo_vmware.api [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1758.002730] env[63371]: value = "task-1774687" [ 1758.002730] env[63371]: _type = "Task" [ 1758.002730] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.016414] env[63371]: DEBUG oslo_vmware.api [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774687, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.020929] env[63371]: DEBUG oslo_vmware.rw_handles [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52145247-ec04-ea36-dfbd-2438af2d210d/disk-0.vmdk. 
{{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1758.021174] env[63371]: INFO nova.virt.vmwareapi.images [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Downloaded image file data 496b93e2-5142-43b5-a0fc-8e75cb31f472 [ 1758.024750] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfed7ef-41e1-413d-9e25-e406ac6bd855 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.042915] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-019533f7-e590-4052-a8cc-0d9b4480fa30 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.078660] env[63371]: INFO nova.virt.vmwareapi.images [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] The imported VM was unregistered [ 1758.081434] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Caching image {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1758.081434] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Creating directory with path [datastore1] devstack-image-cache_base/496b93e2-5142-43b5-a0fc-8e75cb31f472 {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1758.081898] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39e91733-99f1-4a68-b34a-7c64fc73d6fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.103066] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Created directory with path [datastore1] devstack-image-cache_base/496b93e2-5142-43b5-a0fc-8e75cb31f472 {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1758.103263] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_d661717f-78b3-4f8f-b283-c22e3b501597/OSTACK_IMG_d661717f-78b3-4f8f-b283-c22e3b501597.vmdk to [datastore1] devstack-image-cache_base/496b93e2-5142-43b5-a0fc-8e75cb31f472/496b93e2-5142-43b5-a0fc-8e75cb31f472.vmdk. 
{{(pid=63371) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1758.106480] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-8fb8d1f3-95eb-4a9c-a75c-d27729d60175 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.115570] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1758.115570] env[63371]: value = "task-1774689" [ 1758.115570] env[63371]: _type = "Task" [ 1758.115570] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.122465] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774689, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.223684] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: dcf8063b-56eb-439c-bee5-139a1e157714] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1758.241513] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774686, 'name': ReconfigVM_Task, 'duration_secs': 0.467172} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.244478] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Reconfigured VM instance instance-0000005f to attach disk [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad/e16e4a55-4198-4308-b12c-d9ac07daecad.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1758.245677] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c710e9aa-0ba2-48bf-b99b-4454060b952b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.255707] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1758.255707] env[63371]: value = "task-1774690" [ 1758.255707] env[63371]: _type = "Task" [ 1758.255707] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.264385] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774690, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.266050] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b9e45a-9bea-46cf-a700-faeb2c34dd5d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.273543] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9fa1c4-b976-4905-87ae-e5c31c59bd31 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.309815] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2c4725-f02e-49fb-91c6-52cd34b6309a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.313148] env[63371]: DEBUG nova.network.neutron [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1758.321281] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-780d8dba-f94c-4d1f-9c0e-567479848423 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.336534] env[63371]: DEBUG nova.compute.provider_tree [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1758.354660] env[63371]: DEBUG nova.compute.manager [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Start building block device mappings for instance. 
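Note: the "Inventory has not changed" entries for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 carry the compute node's placement inventory (the full dict appears earlier in this log). The usable capacity placement derives from each record is (total - reserved) * allocation_ratio, so this node advertises 192 VCPU, 196078 MB of RAM and 400 GB of disk. A quick check against the logged figures:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(rc, capacity)     # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0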
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1758.408336] env[63371]: DEBUG nova.compute.manager [req-fe5d5696-d6f5-40c2-9d96-41bc3a2341da req-d00916fa-7324-46d5-86b2-e7de089c636d service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Received event network-vif-plugged-4590c30b-effd-423f-b0b2-c208bbdfffd7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1758.408336] env[63371]: DEBUG oslo_concurrency.lockutils [req-fe5d5696-d6f5-40c2-9d96-41bc3a2341da req-d00916fa-7324-46d5-86b2-e7de089c636d service nova] Acquiring lock "d00602b9-16bf-4c11-bc47-6076dddbf159-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.408336] env[63371]: DEBUG oslo_concurrency.lockutils [req-fe5d5696-d6f5-40c2-9d96-41bc3a2341da req-d00916fa-7324-46d5-86b2-e7de089c636d service nova] Lock "d00602b9-16bf-4c11-bc47-6076dddbf159-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.408336] env[63371]: DEBUG oslo_concurrency.lockutils [req-fe5d5696-d6f5-40c2-9d96-41bc3a2341da req-d00916fa-7324-46d5-86b2-e7de089c636d service nova] Lock "d00602b9-16bf-4c11-bc47-6076dddbf159-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.408336] env[63371]: DEBUG nova.compute.manager [req-fe5d5696-d6f5-40c2-9d96-41bc3a2341da req-d00916fa-7324-46d5-86b2-e7de089c636d service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] No waiting events found dispatching network-vif-plugged-4590c30b-effd-423f-b0b2-c208bbdfffd7 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1758.408336] env[63371]: WARNING nova.compute.manager [req-fe5d5696-d6f5-40c2-9d96-41bc3a2341da req-d00916fa-7324-46d5-86b2-e7de089c636d service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Received unexpected event network-vif-plugged-4590c30b-effd-423f-b0b2-c208bbdfffd7 for instance with vm_state active and task_state None. [ 1758.446864] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1758.448449] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-223b8093-90b2-4fa1-883a-4a0d64fbbaf5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.458534] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1758.458534] env[63371]: value = "task-1774691" [ 1758.458534] env[63371]: _type = "Task" [ 1758.458534] env[63371]: } to complete. 
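Note: the req-fe5d5696 entries above show the compute manager receiving network-vif-plugged-4590c30b... from Neutron, taking the per-instance "-events" lock, finding no registered waiter, and logging the event as unexpected (nothing had prepared to wait for it yet). The prepare/wait/dispatch pattern behind those entries can be sketched with plain threading primitives; this illustrates the idea only and is not the nova.compute.manager.InstanceEvents implementation:

    import threading

    class InstanceEvents(object):
        """Minimal prepare-then-wait event table, keyed by event name."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}                    # event name -> threading.Event

        def prepare(self, name):
            with self._lock:                      # cf. the "<uuid>-events" lock in the log
                return self._waiters.setdefault(name, threading.Event())

        def dispatch(self, name):
            with self._lock:
                ev = self._waiters.pop(name, None)
            if ev is None:
                # "No waiting events found dispatching network-vif-plugged-..." above
                print("unexpected event %s" % name)
            else:
                ev.set()

    events = InstanceEvents()
    events.dispatch("network-vif-plugged-4590c30b")    # nobody waiting yet: unexpected
    waiter = events.prepare("network-vif-plugged-4590c30b")
    events.dispatch("network-vif-plugged-4590c30b")
    print(waiter.wait(timeout=1))                      # True: the plug event was delivered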
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.466610] env[63371]: DEBUG nova.network.neutron [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Successfully updated port: 4590c30b-effd-423f-b0b2-c208bbdfffd7 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1758.474563] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774691, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.516856] env[63371]: DEBUG oslo_vmware.api [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774687, 'name': ReconfigVM_Task, 'duration_secs': 0.18832} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.517301] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Reconfigured VM instance instance-0000003d to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1758.518229] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1079d9-9fdc-4829-967b-7bc3d20fcc9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.547021] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 9862b0f0-ccf6-4e69-9e78-cf864adaa65e/9862b0f0-ccf6-4e69-9e78-cf864adaa65e.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1758.547021] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c313d79-dd2f-478b-933d-086812da4048 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.570140] env[63371]: DEBUG oslo_vmware.api [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1758.570140] env[63371]: value = "task-1774692" [ 1758.570140] env[63371]: _type = "Task" [ 1758.570140] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.580015] env[63371]: DEBUG oslo_vmware.api [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774692, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.626185] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774689, 'name': MoveVirtualDisk_Task} progress is 15%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.644851] env[63371]: DEBUG nova.network.neutron [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Successfully created port: 5d79ab9c-2d92-460b-818d-59416391cc29 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1758.727995] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 64fc862c-a755-4cac-997b-7a8328638269] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1758.742388] env[63371]: DEBUG nova.network.neutron [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Updating instance_info_cache with network_info: [{"id": "c6418174-b2f5-4848-bc28-4fc4fc2fb439", "address": "fa:16:3e:56:43:1b", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6418174-b2", "ovs_interfaceid": "c6418174-b2f5-4848-bc28-4fc4fc2fb439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1758.769899] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774690, 'name': Rename_Task, 'duration_secs': 0.169708} completed successfully. 
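Note: the instance_info_cache update above stores the full VIF model for port c6418174-b2f5-4848-bc28-4fc4fc2fb439 (MAC, bridge, subnets with fixed IPs, OVS binding details) as a JSON list. Pulling the commonly needed fields back out of that structure is straightforward; a small helper assuming the same shape as the logged entry:

    def summarize_vif(vif):
        """Return (port_id, mac, fixed_ips, bridge) from one network_info element."""
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        return vif["id"], vif["address"], fixed_ips, vif["network"]["bridge"]

    # Applied to the entry logged above this yields
    # ('c6418174-b2f5-4848-bc28-4fc4fc2fb439', 'fa:16:3e:56:43:1b', ['192.168.128.10'], 'br-int')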
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.770090] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1758.771485] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0219671c-7372-4259-a762-170c4f4cb6c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1758.782493] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1758.782493] env[63371]: value = "task-1774693" [ 1758.782493] env[63371]: _type = "Task" [ 1758.782493] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1758.796911] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774693, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1758.839940] env[63371]: DEBUG nova.scheduler.client.report [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1758.968551] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1758.969542] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1758.969740] env[63371]: DEBUG nova.network.neutron [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1758.975116] env[63371]: DEBUG oslo_vmware.api [None req-20b5d40f-e575-490d-914b-a8e538e2a508 
tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774691, 'name': PowerOnVM_Task, 'duration_secs': 0.478856} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1758.975836] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1758.976061] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-20b5d40f-e575-490d-914b-a8e538e2a508 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating instance '3a6c12a7-732f-4a73-a8c5-6810b554cc03' progress to 100 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1759.083868] env[63371]: DEBUG oslo_vmware.api [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774692, 'name': ReconfigVM_Task, 'duration_secs': 0.418015} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1759.084213] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 9862b0f0-ccf6-4e69-9e78-cf864adaa65e/9862b0f0-ccf6-4e69-9e78-cf864adaa65e.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1759.084516] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance '9862b0f0-ccf6-4e69-9e78-cf864adaa65e' progress to 50 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1759.129692] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774689, 'name': MoveVirtualDisk_Task} progress is 35%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.232154] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: be37eb1c-8582-4446-afd6-ae11a8cadf95] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1759.244484] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "refresh_cache-0c8c6997-bec8-4a3b-80cf-cbf35f3843f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1759.244825] env[63371]: DEBUG nova.compute.manager [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Instance network_info: |[{"id": "c6418174-b2f5-4848-bc28-4fc4fc2fb439", "address": "fa:16:3e:56:43:1b", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6418174-b2", "ovs_interfaceid": "c6418174-b2f5-4848-bc28-4fc4fc2fb439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1759.245288] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:43:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6418174-b2f5-4848-bc28-4fc4fc2fb439', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1759.255507] env[63371]: DEBUG oslo.service.loopingcall [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1759.255816] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1759.256067] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d6080572-3584-4499-8309-a47b5190cca3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.284883] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1759.284883] env[63371]: value = "task-1774694" [ 1759.284883] env[63371]: _type = "Task" [ 1759.284883] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.301579] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774693, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.307247] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774694, 'name': CreateVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.345237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.003s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.348210] env[63371]: DEBUG oslo_concurrency.lockutils [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.524s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1759.364531] env[63371]: DEBUG nova.compute.manager [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1759.385025] env[63371]: INFO nova.scheduler.client.report [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleted allocations for instance 158259a4-f54a-4192-b235-f03838193516 [ 1759.401403] env[63371]: DEBUG nova.virt.hardware [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1759.405047] env[63371]: DEBUG nova.virt.hardware [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1759.405047] env[63371]: DEBUG nova.virt.hardware [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1759.405047] env[63371]: DEBUG nova.virt.hardware [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1759.405047] env[63371]: DEBUG nova.virt.hardware [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1759.405047] env[63371]: DEBUG nova.virt.hardware [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1759.405047] env[63371]: DEBUG nova.virt.hardware [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1759.405047] env[63371]: DEBUG nova.virt.hardware [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 
tempest-ServerTagsTestJSON-2007345903-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1759.405047] env[63371]: DEBUG nova.virt.hardware [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1759.405047] env[63371]: DEBUG nova.virt.hardware [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1759.405047] env[63371]: DEBUG nova.virt.hardware [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1759.405561] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5179de7-4f68-40d0-8079-a2741145b766 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.422101] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251221e7-947c-4ef9-b729-00ca93ffbd8c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.554557] env[63371]: WARNING nova.network.neutron [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] 78c77028-c23a-4160-8b08-d336e8101b3b already exists in list: networks containing: ['78c77028-c23a-4160-8b08-d336e8101b3b']. ignoring it [ 1759.554773] env[63371]: WARNING nova.network.neutron [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] 78c77028-c23a-4160-8b08-d336e8101b3b already exists in list: networks containing: ['78c77028-c23a-4160-8b08-d336e8101b3b']. ignoring it [ 1759.594190] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66fd0d5d-5d1e-49bc-83c6-917a6f9f40a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.624639] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6939417e-050a-4795-843f-59053c40225e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.636280] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774689, 'name': MoveVirtualDisk_Task} progress is 54%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.654387] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance '9862b0f0-ccf6-4e69-9e78-cf864adaa65e' progress to 67 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1759.738428] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 50d5eac1-0752-4089-948c-b04439df6f6c] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1759.790153] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63ece53-fe4e-4528-961f-e503f65005b9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.803644] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774693, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.812485] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d65bc8d-d3be-4e94-8b5f-03f990b1a48b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.815817] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774694, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.852060] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39f1900-bc13-49d9-ba06-134ea81a7bc2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.862336] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d79af5-a669-4094-ad4d-c71d40608924 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.887310] env[63371]: DEBUG nova.compute.provider_tree [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1759.910033] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc26e2a8-8d95-4c23-9cee-ffd2ae144b73 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "158259a4-f54a-4192-b235-f03838193516" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.351s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.053633] env[63371]: DEBUG nova.compute.manager [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Received event network-vif-plugged-c6418174-b2f5-4848-bc28-4fc4fc2fb439 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1760.053895] env[63371]: DEBUG oslo_concurrency.lockutils [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] Acquiring lock "0c8c6997-bec8-4a3b-80cf-cbf35f3843f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.054109] env[63371]: DEBUG oslo_concurrency.lockutils [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] Lock "0c8c6997-bec8-4a3b-80cf-cbf35f3843f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.056307] env[63371]: DEBUG oslo_concurrency.lockutils [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] Lock "0c8c6997-bec8-4a3b-80cf-cbf35f3843f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.056307] env[63371]: DEBUG nova.compute.manager [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] No waiting events found dispatching network-vif-plugged-c6418174-b2f5-4848-bc28-4fc4fc2fb439 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1760.056307] env[63371]: WARNING nova.compute.manager [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 
req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Received unexpected event network-vif-plugged-c6418174-b2f5-4848-bc28-4fc4fc2fb439 for instance with vm_state building and task_state spawning. [ 1760.056307] env[63371]: DEBUG nova.compute.manager [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Received event network-changed-c6418174-b2f5-4848-bc28-4fc4fc2fb439 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1760.056307] env[63371]: DEBUG nova.compute.manager [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Refreshing instance network info cache due to event network-changed-c6418174-b2f5-4848-bc28-4fc4fc2fb439. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1760.056307] env[63371]: DEBUG oslo_concurrency.lockutils [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] Acquiring lock "refresh_cache-0c8c6997-bec8-4a3b-80cf-cbf35f3843f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.056307] env[63371]: DEBUG oslo_concurrency.lockutils [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] Acquired lock "refresh_cache-0c8c6997-bec8-4a3b-80cf-cbf35f3843f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.056307] env[63371]: DEBUG nova.network.neutron [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Refreshing network info cache for port c6418174-b2f5-4848-bc28-4fc4fc2fb439 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1760.133775] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774689, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.206952] env[63371]: DEBUG nova.network.neutron [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Port 82aece5e-dc40-4c18-a1a9-4b4e859fef2a binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1760.240758] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 855005ae-3b0e-4ad7-80cf-266075fc6d0f] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1760.297667] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774693, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.306011] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774694, 'name': CreateVM_Task, 'duration_secs': 0.826313} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.306011] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1760.307547] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.307740] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.308175] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1760.310534] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92e263f1-14e0-4977-bcb3-72a79e658d60 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.317887] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1760.317887] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5211d3eb-90b1-c388-2311-9d276989ef11" [ 1760.317887] env[63371]: _type = "Task" [ 1760.317887] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.331658] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5211d3eb-90b1-c388-2311-9d276989ef11, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.354768] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquiring lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1760.354768] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.391088] env[63371]: DEBUG nova.scheduler.client.report [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1760.535986] env[63371]: DEBUG nova.network.neutron [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updating instance_info_cache with network_info: [{"id": "bc8b891d-040a-4a55-a281-311c08ae828d", "address": "fa:16:3e:ea:27:0c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8b891d-04", "ovs_interfaceid": "bc8b891d-040a-4a55-a281-311c08ae828d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "57835801-cbba-4176-8f6b-8d0ec76aa66e", "address": "fa:16:3e:29:75:d5", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", 
"label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57835801-cb", "ovs_interfaceid": "57835801-cbba-4176-8f6b-8d0ec76aa66e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4590c30b-effd-423f-b0b2-c208bbdfffd7", "address": "fa:16:3e:dd:c9:8c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4590c30b-ef", "ovs_interfaceid": "4590c30b-effd-423f-b0b2-c208bbdfffd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.631870] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774689, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.745481] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 36b81143-211f-4c77-854b-abe0d3f39ce4] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1760.795589] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774693, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.831904] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5211d3eb-90b1-c388-2311-9d276989ef11, 'name': SearchDatastore_Task, 'duration_secs': 0.106798} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1760.832422] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1760.832930] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1760.833154] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1760.833306] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1760.833685] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1760.833988] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa3f0202-ee9a-4d7a-adc7-501bb3b8d713 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.844436] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1760.844436] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1760.845167] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-82760e07-e2ea-4a43-a657-72273ed763a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.851798] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1760.851798] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524d4253-d17d-4116-ab41-496fa697dd71" [ 1760.851798] env[63371]: _type = "Task" [ 1760.851798] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1760.860715] env[63371]: DEBUG nova.compute.manager [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1760.863360] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524d4253-d17d-4116-ab41-496fa697dd71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.903508] env[63371]: DEBUG oslo_concurrency.lockutils [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.555s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1760.905381] env[63371]: INFO nova.compute.manager [None req-090de675-2f44-4a28-8925-ab38aa3f6053 tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Successfully reverted task state from rebuilding on failure for instance. [ 1760.912786] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.282s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1760.913050] env[63371]: DEBUG nova.objects.instance [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lazy-loading 'resources' on Instance uuid 485a2d6a-1b58-470d-9dc5-8cf31b6726ef {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1760.987274] env[63371]: DEBUG nova.network.neutron [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Updated VIF entry in instance network info cache for port c6418174-b2f5-4848-bc28-4fc4fc2fb439. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1760.988789] env[63371]: DEBUG nova.network.neutron [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Updating instance_info_cache with network_info: [{"id": "c6418174-b2f5-4848-bc28-4fc4fc2fb439", "address": "fa:16:3e:56:43:1b", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6418174-b2", "ovs_interfaceid": "c6418174-b2f5-4848-bc28-4fc4fc2fb439", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1761.004187] env[63371]: DEBUG nova.network.neutron [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Successfully updated port: 5d79ab9c-2d92-460b-818d-59416391cc29 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1761.039837] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.040354] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1761.040512] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1761.041413] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdcc874c-956e-4361-895f-811442db8ca8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.060353] env[63371]: DEBUG nova.virt.hardware [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 
tempest-AttachInterfacesTestJSON-796074902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1761.060353] env[63371]: DEBUG nova.virt.hardware [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1761.060353] env[63371]: DEBUG nova.virt.hardware [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1761.060545] env[63371]: DEBUG nova.virt.hardware [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1761.060545] env[63371]: DEBUG nova.virt.hardware [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1761.060657] env[63371]: DEBUG nova.virt.hardware [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1761.060979] env[63371]: DEBUG nova.virt.hardware [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1761.061156] env[63371]: DEBUG nova.virt.hardware [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1761.061637] env[63371]: DEBUG nova.virt.hardware [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1761.061873] env[63371]: DEBUG nova.virt.hardware [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 
tempest-AttachInterfacesTestJSON-796074902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1761.062112] env[63371]: DEBUG nova.virt.hardware [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1761.068936] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Reconfiguring VM to attach interface {{(pid=63371) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1761.069723] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a09e7eab-7e8c-4e9c-882b-11bba417c4c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.087584] env[63371]: DEBUG oslo_vmware.api [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1761.087584] env[63371]: value = "task-1774695" [ 1761.087584] env[63371]: _type = "Task" [ 1761.087584] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.095974] env[63371]: DEBUG oslo_vmware.api [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774695, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.133273] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774689, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.70633} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.133637] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_d661717f-78b3-4f8f-b283-c22e3b501597/OSTACK_IMG_d661717f-78b3-4f8f-b283-c22e3b501597.vmdk to [datastore1] devstack-image-cache_base/496b93e2-5142-43b5-a0fc-8e75cb31f472/496b93e2-5142-43b5-a0fc-8e75cb31f472.vmdk. 
[ 1761.133834] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Cleaning up location [datastore1] OSTACK_IMG_d661717f-78b3-4f8f-b283-c22e3b501597 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1761.135157] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_d661717f-78b3-4f8f-b283-c22e3b501597 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1761.135157] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44ca27e6-0c80-4f0a-b788-a52af290c011 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.141834] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1761.141834] env[63371]: value = "task-1774696" [ 1761.141834] env[63371]: _type = "Task" [ 1761.141834] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.152306] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774696, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.237367] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.238426] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.238927] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.248533] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: fb2ddd3e-7adc-4a34-8797-0e98fdf19379] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1761.298184] env[63371]: DEBUG oslo_vmware.api [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774693, 'name': PowerOnVM_Task, 'duration_secs': 2.175428} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.298184] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1761.298184] env[63371]: DEBUG nova.compute.manager [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1761.298711] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1980169d-46dd-49d4-bc6f-ee3ba0269470 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.363099] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524d4253-d17d-4116-ab41-496fa697dd71, 'name': SearchDatastore_Task, 'duration_secs': 0.032432} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.366216] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2709c330-787c-4429-b0f7-690ddac6a508 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.376113] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1761.376113] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b88510-a6ab-d401-b6a0-023e8c140a60" [ 1761.376113] env[63371]: _type = "Task" [ 1761.376113] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.385310] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b88510-a6ab-d401-b6a0-023e8c140a60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.394104] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.491238] env[63371]: DEBUG oslo_concurrency.lockutils [req-cb9014ff-e356-458c-bbfd-2bb75560e8b6 req-7b1a2a3d-b59d-4146-822e-2108592d6fc5 service nova] Releasing lock "refresh_cache-0c8c6997-bec8-4a3b-80cf-cbf35f3843f8" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.507365] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Acquiring lock "refresh_cache-943e2506-03a4-4633-b55b-381d9d8d9ef6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1761.507365] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Acquired lock "refresh_cache-943e2506-03a4-4633-b55b-381d9d8d9ef6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1761.507566] env[63371]: DEBUG nova.network.neutron [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1761.599733] env[63371]: DEBUG oslo_vmware.api [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774695, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.655974] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774696, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085659} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.655974] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1761.656208] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Releasing lock "[datastore1] devstack-image-cache_base/496b93e2-5142-43b5-a0fc-8e75cb31f472/496b93e2-5142-43b5-a0fc-8e75cb31f472.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.656624] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/496b93e2-5142-43b5-a0fc-8e75cb31f472/496b93e2-5142-43b5-a0fc-8e75cb31f472.vmdk to [datastore1] 9985dbcd-4498-4629-aae5-5e1933307c50/9985dbcd-4498-4629-aae5-5e1933307c50.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1761.656741] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5032076c-d744-4908-8bfe-3132fc066033 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.667672] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1761.667672] env[63371]: value = "task-1774697" [ 1761.667672] env[63371]: _type = "Task" [ 1761.667672] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.676504] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774697, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.709654] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c469a0-e2df-4028-8cb5-e816be887f1d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.719488] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8f46d5-2a66-4f9d-9ecb-529a599848c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.766488] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: b5e259ea-d103-41c6-84b3-748813bb514d] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1761.770952] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1921b386-1e6a-49b3-84ba-2cba9489779e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.781219] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c1f44e-f4c8-480a-82aa-1cf6c17dab4a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.798762] env[63371]: DEBUG nova.compute.provider_tree [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1761.815827] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.887211] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b88510-a6ab-d401-b6a0-023e8c140a60, 'name': SearchDatastore_Task, 'duration_secs': 0.019921} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.887496] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.887752] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8/0c8c6997-bec8-4a3b-80cf-cbf35f3843f8.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1761.888035] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f33fdf2-7e60-4b36-8631-98bfc3dda91e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.895094] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1761.895094] env[63371]: value = "task-1774698" [ 1761.895094] env[63371]: _type = "Task" [ 1761.895094] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.903621] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774698, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.048143] env[63371]: DEBUG nova.network.neutron [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1762.063165] env[63371]: DEBUG nova.compute.manager [req-48b09836-8c03-4b0e-baa6-a5488bc60fc1 req-0b35c604-d29b-42c5-b3f7-ea18dcf5de4c service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Received event network-changed-4590c30b-effd-423f-b0b2-c208bbdfffd7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1762.063340] env[63371]: DEBUG nova.compute.manager [req-48b09836-8c03-4b0e-baa6-a5488bc60fc1 req-0b35c604-d29b-42c5-b3f7-ea18dcf5de4c service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Refreshing instance network info cache due to event network-changed-4590c30b-effd-423f-b0b2-c208bbdfffd7. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1762.063548] env[63371]: DEBUG oslo_concurrency.lockutils [req-48b09836-8c03-4b0e-baa6-a5488bc60fc1 req-0b35c604-d29b-42c5-b3f7-ea18dcf5de4c service nova] Acquiring lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1762.063773] env[63371]: DEBUG oslo_concurrency.lockutils [req-48b09836-8c03-4b0e-baa6-a5488bc60fc1 req-0b35c604-d29b-42c5-b3f7-ea18dcf5de4c service nova] Acquired lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1762.064032] env[63371]: DEBUG nova.network.neutron [req-48b09836-8c03-4b0e-baa6-a5488bc60fc1 req-0b35c604-d29b-42c5-b3f7-ea18dcf5de4c service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Refreshing network info cache for port 4590c30b-effd-423f-b0b2-c208bbdfffd7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1762.111104] env[63371]: DEBUG oslo_vmware.api [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774695, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.181922] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774697, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.273309] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: e8bd5802-d2ff-4348-92d4-c23277f4eaeb] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1762.304805] env[63371]: DEBUG nova.scheduler.client.report [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1762.320567] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1762.320791] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1762.320975] env[63371]: DEBUG nova.network.neutron [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1762.355226] env[63371]: DEBUG nova.network.neutron [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Updating instance_info_cache with network_info: [{"id": "5d79ab9c-2d92-460b-818d-59416391cc29", "address": "fa:16:3e:d8:11:71", "network": {"id": "fd82ce5b-c777-464a-9d98-de8610c1cfc8", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1544091952-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f816d2e4eddd479a9dcc827a7828d119", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d79ab9c-2d", "ovs_interfaceid": "5d79ab9c-2d92-460b-818d-59416391cc29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1762.373771] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Acquiring lock "6c2edb87-7a36-4814-ac4a-199cdca1ef68" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.374655] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Lock "6c2edb87-7a36-4814-ac4a-199cdca1ef68" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.407153] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774698, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.614829] env[63371]: DEBUG oslo_vmware.api [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774695, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.629443] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1762.629767] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.629971] env[63371]: DEBUG nova.compute.manager [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Going to confirm migration 4 {{(pid=63371) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1762.682582] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774697, 'name': CopyVirtualDisk_Task} progress is 29%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.777757] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: b48a8e83-e581-4886-833b-bbce155d40d9] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1762.812346] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.899s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1762.822340] env[63371]: DEBUG oslo_concurrency.lockutils [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.111s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1762.822681] env[63371]: DEBUG nova.objects.instance [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lazy-loading 'resources' on Instance uuid c03e2dc4-75d9-4fbb-afc8-046cbbf908ac {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1762.846300] env[63371]: INFO nova.scheduler.client.report [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted allocations for instance 485a2d6a-1b58-470d-9dc5-8cf31b6726ef [ 1762.858363] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Releasing lock "refresh_cache-943e2506-03a4-4633-b55b-381d9d8d9ef6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1762.859716] env[63371]: DEBUG nova.compute.manager [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Instance network_info: |[{"id": "5d79ab9c-2d92-460b-818d-59416391cc29", "address": "fa:16:3e:d8:11:71", "network": {"id": "fd82ce5b-c777-464a-9d98-de8610c1cfc8", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1544091952-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f816d2e4eddd479a9dcc827a7828d119", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d79ab9c-2d", "ovs_interfaceid": "5d79ab9c-2d92-460b-818d-59416391cc29", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1762.859716] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:11:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e30245c5-78f5-48e6-b504-c6c21f5a9b45', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5d79ab9c-2d92-460b-818d-59416391cc29', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1762.869142] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Creating folder: Project (f816d2e4eddd479a9dcc827a7828d119). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1762.869505] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80e0c92a-ca96-4386-b870-38431b58936d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.877446] env[63371]: DEBUG nova.compute.manager [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1762.895976] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Created folder: Project (f816d2e4eddd479a9dcc827a7828d119) in parent group-v368199. [ 1762.896346] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Creating folder: Instances. Parent ref: group-v368463. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1762.896522] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0212f34-878d-483c-81be-7d517f304fa0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.911490] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774698, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.917023] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Created folder: Instances in parent group-v368463. 
[ 1762.917023] env[63371]: DEBUG oslo.service.loopingcall [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1762.917023] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1762.917023] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3162e463-e35b-4389-a1e8-628af0110fba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1762.947212] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1762.947212] env[63371]: value = "task-1774701" [ 1762.947212] env[63371]: _type = "Task" [ 1762.947212] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1762.958071] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774701, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.959391] env[63371]: DEBUG nova.network.neutron [req-48b09836-8c03-4b0e-baa6-a5488bc60fc1 req-0b35c604-d29b-42c5-b3f7-ea18dcf5de4c service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updated VIF entry in instance network info cache for port 4590c30b-effd-423f-b0b2-c208bbdfffd7. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1762.959905] env[63371]: DEBUG nova.network.neutron [req-48b09836-8c03-4b0e-baa6-a5488bc60fc1 req-0b35c604-d29b-42c5-b3f7-ea18dcf5de4c service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updating instance_info_cache with network_info: [{"id": "bc8b891d-040a-4a55-a281-311c08ae828d", "address": "fa:16:3e:ea:27:0c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8b891d-04", "ovs_interfaceid": "bc8b891d-040a-4a55-a281-311c08ae828d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "57835801-cbba-4176-8f6b-8d0ec76aa66e", "address": "fa:16:3e:29:75:d5", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], 
"gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57835801-cb", "ovs_interfaceid": "57835801-cbba-4176-8f6b-8d0ec76aa66e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4590c30b-effd-423f-b0b2-c208bbdfffd7", "address": "fa:16:3e:dd:c9:8c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4590c30b-ef", "ovs_interfaceid": "4590c30b-effd-423f-b0b2-c208bbdfffd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.105038] env[63371]: DEBUG oslo_vmware.api [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774695, 'name': ReconfigVM_Task, 'duration_secs': 1.857636} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.106823] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1763.107399] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Reconfigured VM to attach interface {{(pid=63371) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1763.179612] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774697, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.221073] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.221369] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.221633] env[63371]: DEBUG nova.network.neutron [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1763.221913] env[63371]: DEBUG nova.objects.instance [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lazy-loading 'info_cache' on Instance uuid 3a6c12a7-732f-4a73-a8c5-6810b554cc03 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1763.258182] env[63371]: DEBUG nova.network.neutron [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance_info_cache with network_info: [{"id": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "address": "fa:16:3e:50:09:23", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82aece5e-dc", "ovs_interfaceid": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1763.287107] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 9249f27a-1985-4be1-947c-e433c7aa26f1] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1763.359210] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b7901e3e-f45b-447e-af98-1f2450dc4828 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "485a2d6a-1b58-470d-9dc5-8cf31b6726ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.177s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.410247] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.410510] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.416890] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.422149] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774698, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.431447] env[63371]: DEBUG nova.compute.manager [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Received event network-vif-plugged-5d79ab9c-2d92-460b-818d-59416391cc29 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1763.431774] env[63371]: DEBUG oslo_concurrency.lockutils [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] Acquiring lock "943e2506-03a4-4633-b55b-381d9d8d9ef6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1763.432016] env[63371]: DEBUG oslo_concurrency.lockutils [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] Lock "943e2506-03a4-4633-b55b-381d9d8d9ef6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1763.432134] env[63371]: DEBUG oslo_concurrency.lockutils [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] Lock "943e2506-03a4-4633-b55b-381d9d8d9ef6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.432309] env[63371]: DEBUG nova.compute.manager [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] No waiting events found dispatching network-vif-plugged-5d79ab9c-2d92-460b-818d-59416391cc29 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1763.432476] env[63371]: WARNING nova.compute.manager [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Received unexpected event network-vif-plugged-5d79ab9c-2d92-460b-818d-59416391cc29 for instance with vm_state building and task_state spawning. [ 1763.432670] env[63371]: DEBUG nova.compute.manager [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Received event network-changed-5d79ab9c-2d92-460b-818d-59416391cc29 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1763.433321] env[63371]: DEBUG nova.compute.manager [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Refreshing instance network info cache due to event network-changed-5d79ab9c-2d92-460b-818d-59416391cc29. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1763.433660] env[63371]: DEBUG oslo_concurrency.lockutils [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] Acquiring lock "refresh_cache-943e2506-03a4-4633-b55b-381d9d8d9ef6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.433819] env[63371]: DEBUG oslo_concurrency.lockutils [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] Acquired lock "refresh_cache-943e2506-03a4-4633-b55b-381d9d8d9ef6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.434016] env[63371]: DEBUG nova.network.neutron [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Refreshing network info cache for port 5d79ab9c-2d92-460b-818d-59416391cc29 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1763.461126] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774701, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.462849] env[63371]: DEBUG oslo_concurrency.lockutils [req-48b09836-8c03-4b0e-baa6-a5488bc60fc1 req-0b35c604-d29b-42c5-b3f7-ea18dcf5de4c service nova] Releasing lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1763.614222] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a7802220-032f-404b-8adc-bfac88a29b79 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-d00602b9-16bf-4c11-bc47-6076dddbf159-4590c30b-effd-423f-b0b2-c208bbdfffd7" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.882s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1763.668739] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf40dcc-55d5-430c-af32-e8b624c4c5fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.682800] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774697, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.686503] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a97282d-8540-4eb1-81d4-451dd1176694 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.726842] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c166ec2a-ec2d-47b1-ad8a-72d3673cb5d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.739634] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad708b3d-5a96-4ace-bb0f-8b797bfc64f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.760540] env[63371]: DEBUG nova.compute.provider_tree [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1763.763509] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1763.792839] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 7e463dd7-84a6-4e6d-ae8f-0860e3a20f05] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1763.910788] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774698, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.83079} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.911355] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8/0c8c6997-bec8-4a3b-80cf-cbf35f3843f8.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1763.911813] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1763.912257] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66256be8-ed1f-4569-9855-437a7271da04 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.917699] env[63371]: DEBUG nova.compute.manager [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1763.922744] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1763.922744] env[63371]: value = "task-1774702" [ 1763.922744] env[63371]: _type = "Task" [ 1763.922744] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.942740] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774702, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.961281] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774701, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.183980] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774697, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.207978] env[63371]: DEBUG nova.network.neutron [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Updated VIF entry in instance network info cache for port 5d79ab9c-2d92-460b-818d-59416391cc29. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1764.208439] env[63371]: DEBUG nova.network.neutron [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Updating instance_info_cache with network_info: [{"id": "5d79ab9c-2d92-460b-818d-59416391cc29", "address": "fa:16:3e:d8:11:71", "network": {"id": "fd82ce5b-c777-464a-9d98-de8610c1cfc8", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1544091952-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f816d2e4eddd479a9dcc827a7828d119", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e30245c5-78f5-48e6-b504-c6c21f5a9b45", "external-id": "nsx-vlan-transportzone-409", "segmentation_id": 409, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d79ab9c-2d", "ovs_interfaceid": "5d79ab9c-2d92-460b-818d-59416391cc29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.295994] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 4ebd3a28-b2b2-4dc5-adf2-3e7e084241f6] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1764.298660] env[63371]: ERROR nova.scheduler.client.report [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [req-591c99b5-180e-4294-bfa7-ffb3c0b5001d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-591c99b5-180e-4294-bfa7-ffb3c0b5001d"}]} [ 1764.304161] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851e9289-d8a4-46c3-a6d7-82d35254af62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.335311] env[63371]: DEBUG nova.scheduler.client.report [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1764.338780] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e1c44b3-b67e-40dd-99f2-62c353774125 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.352533] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance '9862b0f0-ccf6-4e69-9e78-cf864adaa65e' progress to 83 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1764.361310] env[63371]: DEBUG nova.scheduler.client.report [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1764.361547] env[63371]: DEBUG nova.compute.provider_tree [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1764.382229] env[63371]: DEBUG nova.scheduler.client.report [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1764.410498] env[63371]: DEBUG nova.scheduler.client.report [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1764.448019] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774702, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116012} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.448019] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1764.448906] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0300e76-2605-4f9d-ab68-4989bcd358b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.481926] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8/0c8c6997-bec8-4a3b-80cf-cbf35f3843f8.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1764.488907] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2df3895a-08c3-4716-8d5f-7c5462c464bd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.503633] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774701, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.515821] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1764.515821] env[63371]: value = "task-1774703" [ 1764.515821] env[63371]: _type = "Task" [ 1764.515821] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.531058] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774703, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.543638] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.647474] env[63371]: DEBUG nova.network.neutron [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating instance_info_cache with network_info: [{"id": "e144cd6b-c3f5-496e-99c6-19e9ab58c042", "address": "fa:16:3e:99:d0:57", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape144cd6b-c3", "ovs_interfaceid": "e144cd6b-c3f5-496e-99c6-19e9ab58c042", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1764.672108] env[63371]: INFO nova.compute.manager [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Rebuilding instance [ 1764.686908] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774697, 'name': CopyVirtualDisk_Task} progress is 97%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.711856] env[63371]: DEBUG oslo_concurrency.lockutils [req-d3113cee-184c-4cb0-bb28-14cc2c8b260b req-d23a0460-2498-437e-b6be-0c443dbe515f service nova] Releasing lock "refresh_cache-943e2506-03a4-4633-b55b-381d9d8d9ef6" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.726480] env[63371]: DEBUG nova.compute.manager [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1764.727406] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78682997-b796-4dcb-aa30-bf29dfe4fb30 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.800032] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 852e14a7-2f9f-421c-9804-56c885885c7d] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1764.819077] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c635c9-294b-4ae3-b85d-ba0f01a8cf77 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.828946] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7bf53b4-ea37-4196-94c2-5524399346a1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.871743] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e1eb2fbb-655b-427f-8230-75077cdacf68 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance '9862b0f0-ccf6-4e69-9e78-cf864adaa65e' progress to 100 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1764.877737] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec45753a-4bf7-4cb8-b981-da6dc2746399 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.888610] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3052fcc-3e39-4041-81c5-abdc1fefeaa0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.910547] env[63371]: DEBUG nova.compute.provider_tree [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1764.962368] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774701, 'name': CreateVM_Task, 'duration_secs': 1.650774} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.962557] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1764.963482] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.963537] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.963884] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1764.964117] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cca3904d-26c0-41fe-b7ae-89649c033dbe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.969322] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Waiting for the task: (returnval){ [ 1764.969322] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52e20416-91c2-c2d5-57ff-c1d3bc8c5e07" [ 1764.969322] env[63371]: _type = "Task" [ 1764.969322] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.980140] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e20416-91c2-c2d5-57ff-c1d3bc8c5e07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.029712] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774703, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.151531] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "refresh_cache-3a6c12a7-732f-4a73-a8c5-6810b554cc03" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1765.152178] env[63371]: DEBUG nova.objects.instance [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lazy-loading 'migration_context' on Instance uuid 3a6c12a7-732f-4a73-a8c5-6810b554cc03 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1765.186028] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774697, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.124236} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.186377] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/496b93e2-5142-43b5-a0fc-8e75cb31f472/496b93e2-5142-43b5-a0fc-8e75cb31f472.vmdk to [datastore1] 9985dbcd-4498-4629-aae5-5e1933307c50/9985dbcd-4498-4629-aae5-5e1933307c50.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1765.187155] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65dfc3f6-8fff-4e24-ab68-f2214c1fcee7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.210770] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 9985dbcd-4498-4629-aae5-5e1933307c50/9985dbcd-4498-4629-aae5-5e1933307c50.vmdk or device None with type streamOptimized {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1765.211121] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7364c2a1-03ed-47d0-886c-4f29333b11a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.230774] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1765.230774] env[63371]: value = "task-1774704" [ 1765.230774] env[63371]: _type = "Task" [ 1765.230774] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.238835] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774704, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.249233] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1765.249520] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1cef3dad-ab18-4e82-b3a6-be79bee09dec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.256647] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Waiting for the task: (returnval){ [ 1765.256647] env[63371]: value = "task-1774705" [ 1765.256647] env[63371]: _type = "Task" [ 1765.256647] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.269040] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774705, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.305360] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: dc6ef0a7-1744-4b90-b385-913cb796f7d0] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1765.391038] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.391038] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.436011] env[63371]: ERROR nova.scheduler.client.report [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] [req-4eb00f60-cf38-41b4-9591-a8a8a99087b4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4eb00f60-cf38-41b4-9591-a8a8a99087b4"}]} [ 1765.455285] env[63371]: DEBUG nova.scheduler.client.report [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1765.470908] env[63371]: DEBUG nova.scheduler.client.report [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1765.471180] env[63371]: DEBUG nova.compute.provider_tree [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1765.483076] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e20416-91c2-c2d5-57ff-c1d3bc8c5e07, 'name': SearchDatastore_Task, 'duration_secs': 0.012589} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.483323] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1765.483555] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1765.483848] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1765.484022] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1765.484226] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1765.485190] env[63371]: DEBUG nova.scheduler.client.report [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1765.487274] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1f22131-fd12-4637-ab46-73e298f49fc5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.496806] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1765.497015] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1765.498234] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-330d438c-02f1-4cb4-a6ad-36a0de0dcfff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.504509] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Waiting for the task: (returnval){ [ 1765.504509] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d0a189-bd58-604e-ee66-2dc95d424bc6" [ 1765.504509] env[63371]: _type = "Task" [ 1765.504509] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.505459] env[63371]: DEBUG nova.scheduler.client.report [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1765.519718] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d0a189-bd58-604e-ee66-2dc95d424bc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.526602] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774703, 'name': ReconfigVM_Task, 'duration_secs': 0.611863} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.526886] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8/0c8c6997-bec8-4a3b-80cf-cbf35f3843f8.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1765.528572] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5bb555e1-604c-4c20-9737-370d8d7d094e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.534405] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1765.534405] env[63371]: value = "task-1774706" [ 1765.534405] env[63371]: _type = "Task" [ 1765.534405] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.544994] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "cf63c2a2-ee72-464e-944d-5e53ca8635ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.545956] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "cf63c2a2-ee72-464e-944d-5e53ca8635ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.545956] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "cf63c2a2-ee72-464e-944d-5e53ca8635ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.545956] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "cf63c2a2-ee72-464e-944d-5e53ca8635ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.545956] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "cf63c2a2-ee72-464e-944d-5e53ca8635ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.547375] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774706, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.550073] env[63371]: INFO nova.compute.manager [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Terminating instance [ 1765.553282] env[63371]: DEBUG nova.compute.manager [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1765.553479] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1765.554347] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818b5d1f-2656-41f9-adb5-95c7cc44e48a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.563840] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1765.566168] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c0be726d-6b96-4611-a3cd-7a7bab8f02af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.574885] env[63371]: DEBUG oslo_vmware.api [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1765.574885] env[63371]: value = "task-1774707" [ 1765.574885] env[63371]: _type = "Task" [ 1765.574885] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.586716] env[63371]: DEBUG oslo_vmware.api [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774707, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.654684] env[63371]: DEBUG nova.objects.base [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Object Instance<3a6c12a7-732f-4a73-a8c5-6810b554cc03> lazy-loaded attributes: info_cache,migration_context {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1765.655398] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e848dfa2-9584-46ce-b2ff-210231e8e5d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.681038] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e5e14e5-c46d-4139-ba51-378236570ef2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.686853] env[63371]: DEBUG oslo_vmware.api [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1765.686853] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b8d053-f8e1-a1c5-fa85-634d6776f786" [ 1765.686853] env[63371]: _type = "Task" [ 1765.686853] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.702168] env[63371]: DEBUG oslo_vmware.api [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b8d053-f8e1-a1c5-fa85-634d6776f786, 'name': SearchDatastore_Task, 'duration_secs': 0.010662} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.702966] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.743351] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774704, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.768141] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774705, 'name': PowerOffVM_Task, 'duration_secs': 0.134273} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.768425] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1765.768643] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1765.769447] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e60363f6-1a3a-4a69-a48e-7b2e11e0f7d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.780747] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1765.780747] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2094ce18-3337-4e22-9d10-5495e443cdd6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.806121] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] 
[instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1765.806343] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1765.806522] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Deleting the datastore file [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1765.810021] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b549d903-de11-4834-8f60-f91c54214a99 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.810705] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: e00c2e45-b8bc-440b-8b58-a21f127192c7] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1765.813765] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Waiting for the task: (returnval){ [ 1765.813765] env[63371]: value = "task-1774709" [ 1765.813765] env[63371]: _type = "Task" [ 1765.813765] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.827666] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774709, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.849275] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5b0b5aa-327b-45b8-a480-49701d5982e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.858175] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e31992-dbae-445a-afd5-275ca908f3cf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.900743] env[63371]: DEBUG nova.compute.manager [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1765.905014] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338d5f06-5a6f-458a-a86c-2c585f485107 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.912354] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6664c56d-2843-4fef-9e58-bc9cc367264b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.929084] env[63371]: DEBUG nova.compute.provider_tree [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1765.940636] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "b523486c-adae-4322-80be-1f3bf33ca192" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.940636] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.940636] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "b523486c-adae-4322-80be-1f3bf33ca192-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1765.940896] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1765.940896] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1765.943173] env[63371]: INFO nova.compute.manager [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] 
Terminating instance [ 1765.945063] env[63371]: DEBUG nova.compute.manager [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1765.945261] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1765.946309] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc28f85-883a-40d0-8fd0-c9d8dc36acbf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.954585] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1765.954832] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01e158bd-07f3-4d1b-b50c-8402ad293513 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.962371] env[63371]: DEBUG oslo_vmware.api [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1765.962371] env[63371]: value = "task-1774710" [ 1765.962371] env[63371]: _type = "Task" [ 1765.962371] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.979028] env[63371]: DEBUG oslo_vmware.api [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774710, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.015635] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d0a189-bd58-604e-ee66-2dc95d424bc6, 'name': SearchDatastore_Task, 'duration_secs': 0.013757} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.016569] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fe811bf-9949-4614-9aab-2c6996368914 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.022917] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Waiting for the task: (returnval){ [ 1766.022917] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]523965b5-cf74-c966-6e4b-2da3f4424dea" [ 1766.022917] env[63371]: _type = "Task" [ 1766.022917] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.032877] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523965b5-cf74-c966-6e4b-2da3f4424dea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.046075] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774706, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.083614] env[63371]: DEBUG oslo_vmware.api [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774707, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.194659] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "interface-d00602b9-16bf-4c11-bc47-6076dddbf159-57835801-cbba-4176-8f6b-8d0ec76aa66e" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.195089] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-d00602b9-16bf-4c11-bc47-6076dddbf159-57835801-cbba-4176-8f6b-8d0ec76aa66e" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.245621] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774704, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.315243] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 76c861a7-30f2-40f4-b723-7912975f36f8] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1766.327792] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774709, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206019} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.328071] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1766.328303] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1766.328512] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1766.431747] env[63371]: DEBUG nova.scheduler.client.report [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1766.439715] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1766.473823] env[63371]: DEBUG oslo_vmware.api [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774710, 'name': PowerOffVM_Task, 'duration_secs': 0.383837} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.474580] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1766.474580] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1766.474580] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a089fdbf-e218-46a5-889a-f627069d154c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.535151] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]523965b5-cf74-c966-6e4b-2da3f4424dea, 'name': SearchDatastore_Task, 'duration_secs': 0.011405} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.535386] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.536009] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 943e2506-03a4-4633-b55b-381d9d8d9ef6/943e2506-03a4-4633-b55b-381d9d8d9ef6.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1766.536117] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbe62d1f-1700-421b-bc51-ed11c3a8eae6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.551084] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774706, 'name': Rename_Task, 'duration_secs': 0.719591} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.552939] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1766.554086] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Waiting for the task: (returnval){ [ 1766.554086] env[63371]: value = "task-1774712" [ 1766.554086] env[63371]: _type = "Task" [ 1766.554086] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.554372] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6437dd35-20e6-4278-a737-1a3a1db92614 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.562952] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1766.562952] env[63371]: value = "task-1774713" [ 1766.562952] env[63371]: _type = "Task" [ 1766.562952] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.565906] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774712, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.576216] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774713, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.585112] env[63371]: DEBUG oslo_vmware.api [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774707, 'name': PowerOffVM_Task, 'duration_secs': 0.697973} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.585344] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1766.585575] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1766.585851] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-340ce89d-6f96-4a1d-86c2-5f3f51ff0fb3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.606235] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1766.606516] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1766.606735] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Deleting the datastore file [datastore1] b523486c-adae-4322-80be-1f3bf33ca192 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1766.607039] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6619b44-0f31-4ed3-ab86-6c974e3a063b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.615464] env[63371]: DEBUG oslo_vmware.api [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1766.615464] env[63371]: value = "task-1774715" [ 1766.615464] env[63371]: _type = "Task" [ 1766.615464] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.627791] env[63371]: DEBUG oslo_vmware.api [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774715, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.677616] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1766.677891] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1766.678109] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleting the datastore file [datastore1] cf63c2a2-ee72-464e-944d-5e53ca8635ac {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1766.678433] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1365bd46-7172-4e8f-8978-e266d2321a04 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.685725] env[63371]: DEBUG oslo_vmware.api [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1766.685725] env[63371]: value = "task-1774716" [ 1766.685725] env[63371]: _type = "Task" [ 1766.685725] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.694465] env[63371]: DEBUG oslo_vmware.api [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774716, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.697810] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.698214] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.698926] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f4eb19-ab90-4136-8577-13529ac9d8a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.721115] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d90bdaa-f775-4515-8a18-873876dff743 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.755810] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Reconfiguring VM to detach interface {{(pid=63371) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1766.759430] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f971230e-d4a5-4740-a11f-1c6a138ac465 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.781941] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774704, 'name': ReconfigVM_Task, 'duration_secs': 1.174779} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.782956] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 9985dbcd-4498-4629-aae5-5e1933307c50/9985dbcd-4498-4629-aae5-5e1933307c50.vmdk or device None with type streamOptimized {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1766.783552] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1766.783552] env[63371]: value = "task-1774717" [ 1766.783552] env[63371]: _type = "Task" [ 1766.783552] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.783813] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55b0b08f-1ac0-46f8-8b66-7d6367227b72 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.799840] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1766.799840] env[63371]: value = "task-1774718" [ 1766.799840] env[63371]: _type = "Task" [ 1766.799840] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.800655] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.810040] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774718, 'name': Rename_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.823796] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 3f79bc3e-4dd4-4b5f-a5ba-a17124e70406] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1766.936869] env[63371]: DEBUG oslo_concurrency.lockutils [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.119s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.941027] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.033s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.942730] env[63371]: INFO nova.compute.claims [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1767.068262] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774712, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.076886] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774713, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.129852] env[63371]: DEBUG oslo_vmware.api [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774715, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.349276} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.129981] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1767.130194] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1767.130441] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1767.130788] env[63371]: INFO nova.compute.manager [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1767.131087] env[63371]: DEBUG oslo.service.loopingcall [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1767.131331] env[63371]: DEBUG nova.compute.manager [-] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1767.131430] env[63371]: DEBUG nova.network.neutron [-] [instance: b523486c-adae-4322-80be-1f3bf33ca192] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1767.149825] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "9885de9e-c640-4d82-a47a-980988d89deb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.150087] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "9885de9e-c640-4d82-a47a-980988d89deb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.195098] env[63371]: DEBUG oslo_vmware.api [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774716, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.295960] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.313734] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774718, 'name': Rename_Task, 'duration_secs': 0.236333} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.313734] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1767.313734] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb92e96e-fcbc-4950-ae28-9ad3b30a0b3f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.319850] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1767.319850] env[63371]: value = "task-1774719" [ 1767.319850] env[63371]: _type = "Task" [ 1767.319850] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.328027] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: af1281ba-c3be-43b4-a039-86d94bd9efe4] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1767.328331] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774719, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.369419] env[63371]: DEBUG nova.virt.hardware [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1767.369713] env[63371]: DEBUG nova.virt.hardware [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1767.369902] env[63371]: DEBUG nova.virt.hardware [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1767.370656] env[63371]: 
DEBUG nova.virt.hardware [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1767.370790] env[63371]: DEBUG nova.virt.hardware [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1767.370988] env[63371]: DEBUG nova.virt.hardware [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1767.371305] env[63371]: DEBUG nova.virt.hardware [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1767.371573] env[63371]: DEBUG nova.virt.hardware [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1767.371811] env[63371]: DEBUG nova.virt.hardware [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1767.372042] env[63371]: DEBUG nova.virt.hardware [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1767.372274] env[63371]: DEBUG nova.virt.hardware [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1767.373924] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27db65f-ac08-4af0-b0c4-47b9807d6b3b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.377221] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.377501] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 
tempest-ServerActionsTestOtherB-610614522-project-member] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.377725] env[63371]: DEBUG nova.compute.manager [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Going to confirm migration 5 {{(pid=63371) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1767.385486] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5054fe-c37e-4eb9-a414-80c012523706 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.400954] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Instance VIF info [] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1767.406954] env[63371]: DEBUG oslo.service.loopingcall [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1767.407584] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1767.407878] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0604e523-1e4d-49c6-8552-1b675a67358c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.428456] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1767.428456] env[63371]: value = "task-1774720" [ 1767.428456] env[63371]: _type = "Task" [ 1767.428456] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.438291] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774720, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.460450] env[63371]: DEBUG oslo_concurrency.lockutils [None req-756715cd-54e2-43cd-b190-d35f607e7dcd tempest-ServerActionsV293TestJSON-417627862 tempest-ServerActionsV293TestJSON-417627862-project-member] Lock "c03e2dc4-75d9-4fbb-afc8-046cbbf908ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.674s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1767.567766] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774712, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582541} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.568061] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 943e2506-03a4-4633-b55b-381d9d8d9ef6/943e2506-03a4-4633-b55b-381d9d8d9ef6.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1767.569515] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1767.569515] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d497e817-56b3-4976-95be-b369c416f603 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.579088] env[63371]: DEBUG oslo_vmware.api [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774713, 'name': PowerOnVM_Task, 'duration_secs': 0.663121} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.580413] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1767.580646] env[63371]: INFO nova.compute.manager [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Took 11.88 seconds to spawn the instance on the hypervisor. [ 1767.580825] env[63371]: DEBUG nova.compute.manager [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1767.581221] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Waiting for the task: (returnval){ [ 1767.581221] env[63371]: value = "task-1774721" [ 1767.581221] env[63371]: _type = "Task" [ 1767.581221] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.582694] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9da042-5b59-4e08-9137-3fdb0987b9d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.597538] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774721, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.652978] env[63371]: DEBUG nova.compute.manager [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1767.696060] env[63371]: DEBUG oslo_vmware.api [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774716, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.885857} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.696324] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1767.696510] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1767.696685] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1767.696942] env[63371]: INFO nova.compute.manager [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Took 2.14 seconds to destroy the instance on the hypervisor. [ 1767.697287] env[63371]: DEBUG oslo.service.loopingcall [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1767.698265] env[63371]: DEBUG nova.compute.manager [-] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1767.698399] env[63371]: DEBUG nova.network.neutron [-] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1767.723655] env[63371]: DEBUG nova.compute.manager [req-7aa03873-e0c6-4517-8d4f-b33e66d0d6c4 req-5ad5f97d-5675-4951-90b0-34e86fa50979 service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Received event network-vif-deleted-993ff886-27f6-48cd-be00-f0e8d292b060 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1767.723801] env[63371]: INFO nova.compute.manager [req-7aa03873-e0c6-4517-8d4f-b33e66d0d6c4 req-5ad5f97d-5675-4951-90b0-34e86fa50979 service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Neutron deleted interface 993ff886-27f6-48cd-be00-f0e8d292b060; detaching it from the instance and deleting it from the info cache [ 1767.724096] env[63371]: DEBUG nova.network.neutron [req-7aa03873-e0c6-4517-8d4f-b33e66d0d6c4 req-5ad5f97d-5675-4951-90b0-34e86fa50979 service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1767.800332] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.830743] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 713dfaf5-d11f-4af2-af92-66a596b0ed4a] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1767.833644] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774719, 'name': PowerOnVM_Task} progress is 86%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.929631] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.929839] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.930030] env[63371]: DEBUG nova.network.neutron [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1767.930225] env[63371]: DEBUG nova.objects.instance [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'info_cache' on Instance uuid 9862b0f0-ccf6-4e69-9e78-cf864adaa65e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1767.941444] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774720, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.094322] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774721, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.144501} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.094606] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1768.095875] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e09e5e-a5d9-4193-b20a-ae1aa6cbf025 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.109972] env[63371]: INFO nova.compute.manager [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Took 28.51 seconds to build instance. 
[ 1768.120438] env[63371]: DEBUG nova.network.neutron [-] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.131106] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 943e2506-03a4-4633-b55b-381d9d8d9ef6/943e2506-03a4-4633-b55b-381d9d8d9ef6.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1768.135184] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02b7f9da-9913-4f8a-812c-c78f45357a4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.164095] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Waiting for the task: (returnval){ [ 1768.164095] env[63371]: value = "task-1774722" [ 1768.164095] env[63371]: _type = "Task" [ 1768.164095] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.183991] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774722, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.188478] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.229362] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4dd832bc-1214-49b5-93c8-d60da461c2e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.242815] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b662415-6afb-4ff9-a824-f42bbafbc3d5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.284305] env[63371]: DEBUG nova.compute.manager [req-7aa03873-e0c6-4517-8d4f-b33e66d0d6c4 req-5ad5f97d-5675-4951-90b0-34e86fa50979 service nova] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Detach interface failed, port_id=993ff886-27f6-48cd-be00-f0e8d292b060, reason: Instance b523486c-adae-4322-80be-1f3bf33ca192 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1768.297745] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.332131] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774719, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.335028] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: e912c210-3ae1-47ce-b9cd-afebf6195606] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1768.337994] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf72abd-7ebe-4b63-8a62-7680bf95d576 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.345688] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d8ac7a-5dd4-4fcb-aee6-904dd1e70b72 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.386070] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15638fd7-a1c4-42a1-9b57-a444681d283a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.391314] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5516017-bf6a-4e1f-9a88-0cac89a4e90c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.406020] env[63371]: DEBUG nova.compute.provider_tree [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1768.443232] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774720, 'name': CreateVM_Task, 'duration_secs': 0.616343} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.443417] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1768.444363] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1768.444533] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1768.444841] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1768.445406] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ead7103-8601-4ffb-9621-5fcc65b63199 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.450563] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Waiting for the task: (returnval){ [ 1768.450563] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524b6591-0c8c-97b6-7548-adbd2bc576e6" [ 1768.450563] env[63371]: _type = "Task" [ 1768.450563] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.460220] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524b6591-0c8c-97b6-7548-adbd2bc576e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.462493] env[63371]: DEBUG nova.network.neutron [-] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.632901] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ed4d8ab5-d6e6-495c-821a-e672d2818580 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "0c8c6997-bec8-4a3b-80cf-cbf35f3843f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.046s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.635483] env[63371]: INFO nova.compute.manager [-] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Took 1.50 seconds to deallocate network for instance. [ 1768.679481] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774722, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.801660] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.832520] env[63371]: DEBUG oslo_vmware.api [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774719, 'name': PowerOnVM_Task, 'duration_secs': 1.123993} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.832910] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1768.841528] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: e0369f27-68ea-49c4-8524-3dbbb3cde96e] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1768.909014] env[63371]: DEBUG nova.scheduler.client.report [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1768.945651] env[63371]: DEBUG nova.compute.manager [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1768.946272] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-237f5b79-c4c0-4a7e-95bc-163af960a214 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.965440] env[63371]: INFO nova.compute.manager [-] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Took 1.27 seconds to deallocate network for instance. [ 1768.966240] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524b6591-0c8c-97b6-7548-adbd2bc576e6, 'name': SearchDatastore_Task, 'duration_secs': 0.146582} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.967355] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.967601] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1768.969081] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1768.969081] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1768.969081] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1768.970963] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ed6bd933-067b-4f13-9f11-920908127299 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.034081] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1769.034296] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1769.035331] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ce92ba8-2eb1-4eae-96d9-4f8156b819f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.040600] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Waiting for the task: (returnval){ [ 1769.040600] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5242a121-80c4-8c7c-5ee0-315fae99923a" [ 1769.040600] env[63371]: _type = "Task" [ 1769.040600] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.048386] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5242a121-80c4-8c7c-5ee0-315fae99923a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.142717] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.178612] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774722, 'name': ReconfigVM_Task, 'duration_secs': 0.709147} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.178842] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 943e2506-03a4-4633-b55b-381d9d8d9ef6/943e2506-03a4-4633-b55b-381d9d8d9ef6.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1769.179497] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-428e16e7-a6d2-4972-87bf-a1a00732282a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.185259] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Waiting for the task: (returnval){ [ 1769.185259] env[63371]: value = "task-1774723" [ 1769.185259] env[63371]: _type = "Task" [ 1769.185259] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.193539] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774723, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.214183] env[63371]: DEBUG nova.network.neutron [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance_info_cache with network_info: [{"id": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "address": "fa:16:3e:50:09:23", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82aece5e-dc", "ovs_interfaceid": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1769.261908] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069a041a-ab5f-4d8c-a147-322e0ed4bc29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.270605] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a6550c60-5a01-4d78-a589-596f09356fb1 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Suspending the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1769.270883] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-cfd72a71-d200-4b8e-9158-cb8f03161cb7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.277707] env[63371]: DEBUG oslo_vmware.api [None req-a6550c60-5a01-4d78-a589-596f09356fb1 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1769.277707] env[63371]: value = "task-1774724" [ 1769.277707] env[63371]: _type = "Task" [ 1769.277707] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.286842] env[63371]: DEBUG oslo_vmware.api [None req-a6550c60-5a01-4d78-a589-596f09356fb1 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774724, 'name': SuspendVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.297304] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.344953] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: e4608e3c-7083-42fa-b88c-8ee007ef7f60] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1769.414016] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.414569] env[63371]: DEBUG nova.compute.manager [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1769.417338] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.394s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.417557] env[63371]: DEBUG nova.objects.instance [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lazy-loading 'resources' on Instance uuid 7349ecf6-2de7-4540-b713-7e29cbd3ff0b {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1769.468128] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6fcb6462-d882-4d62-b3a1-fa1a220f0bd7 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "9985dbcd-4498-4629-aae5-5e1933307c50" "released" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: held 40.718s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.474531] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1769.552678] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5242a121-80c4-8c7c-5ee0-315fae99923a, 'name': SearchDatastore_Task, 'duration_secs': 0.033321} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.553510] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2909f32-b72a-47de-86e9-25945103d8f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.559178] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Waiting for the task: (returnval){ [ 1769.559178] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c4ee7d-51fe-c79d-4155-987a60acc240" [ 1769.559178] env[63371]: _type = "Task" [ 1769.559178] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.566969] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c4ee7d-51fe-c79d-4155-987a60acc240, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.695841] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774723, 'name': Rename_Task, 'duration_secs': 0.15171} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.696116] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1769.696382] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44690737-79c8-420b-abd6-f5991a3ac20d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.702806] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Waiting for the task: (returnval){ [ 1769.702806] env[63371]: value = "task-1774725" [ 1769.702806] env[63371]: _type = "Task" [ 1769.702806] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.710102] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774725, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.716683] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1769.716915] env[63371]: DEBUG nova.objects.instance [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'migration_context' on Instance uuid 9862b0f0-ccf6-4e69-9e78-cf864adaa65e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1769.763420] env[63371]: DEBUG nova.compute.manager [req-b364c4a5-8168-4ff2-8938-34db43489c19 req-c2b24c27-8f01-40ee-ab7f-c1225b16cd78 service nova] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Received event network-vif-deleted-bcf2f3d2-8a1b-4315-97ce-63cf7f4ef30f {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1769.788009] env[63371]: DEBUG oslo_vmware.api [None req-a6550c60-5a01-4d78-a589-596f09356fb1 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774724, 'name': SuspendVM_Task} progress is 70%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.797731] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.848670] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: cd5f1b3d-9c73-45bf-9ef4-8599ad0ea2e1] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1769.921944] env[63371]: DEBUG nova.compute.utils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1769.929022] env[63371]: DEBUG nova.compute.manager [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1769.929022] env[63371]: DEBUG nova.network.neutron [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1770.004524] env[63371]: DEBUG nova.policy [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4302f381e0948438b9ee23a33a0f982', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35882164a8734563a006675f2ec6ba71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1770.072134] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c4ee7d-51fe-c79d-4155-987a60acc240, 'name': SearchDatastore_Task, 'duration_secs': 0.013595} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.074351] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1770.074735] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad/e16e4a55-4198-4308-b12c-d9ac07daecad.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1770.075510] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42317078-1b1d-436c-9974-3bdc6afa74cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.085020] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Waiting for the task: (returnval){ [ 1770.085020] env[63371]: value = "task-1774726" [ 1770.085020] env[63371]: _type = "Task" [ 1770.085020] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.094441] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774726, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.213536] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774725, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.219238] env[63371]: DEBUG nova.objects.base [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Object Instance<9862b0f0-ccf6-4e69-9e78-cf864adaa65e> lazy-loaded attributes: info_cache,migration_context {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1770.221017] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006e5c5f-b2e3-4aa1-a746-1ae6baad7a78 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.243289] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2d68449-5aa5-491b-b34e-f27535906ee3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.249694] env[63371]: DEBUG oslo_vmware.api [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1770.249694] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5220f381-042b-030a-67db-9201718be2c4" [ 1770.249694] env[63371]: _type = "Task" [ 1770.249694] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.258776] env[63371]: DEBUG oslo_vmware.api [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5220f381-042b-030a-67db-9201718be2c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.298889] env[63371]: DEBUG oslo_vmware.api [None req-a6550c60-5a01-4d78-a589-596f09356fb1 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774724, 'name': SuspendVM_Task, 'duration_secs': 0.656829} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.302555] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a6550c60-5a01-4d78-a589-596f09356fb1 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Suspended the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1770.302789] env[63371]: DEBUG nova.compute.manager [None req-a6550c60-5a01-4d78-a589-596f09356fb1 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1770.304743] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7838b3a3-e17a-46d3-8ca9-9eb808c64fd3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.310835] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.318571] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01485e8b-1892-4581-97cb-1e8f28c36d16 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.327223] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f5c4296-7219-409b-a231-9f126b527493 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.367677] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: cfbd0c7c-243e-497a-acb1-ab9323c23574] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1770.371541] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23061d72-4302-4e59-8bc5-6ea67ac09bc2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.381508] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03ce19b-7089-4bb6-a1ec-27b30c5ec5ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.400331] env[63371]: DEBUG nova.compute.provider_tree [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1770.411801] env[63371]: DEBUG nova.network.neutron [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Successfully created port: febc3a69-64cf-48c1-8399-147f35d89c61 {{(pid=63371) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 1770.427921] env[63371]: DEBUG nova.compute.manager [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1770.592969] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774726, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.714299] env[63371]: DEBUG oslo_vmware.api [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774725, 'name': PowerOnVM_Task, 'duration_secs': 0.5409} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.714573] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1770.714795] env[63371]: INFO nova.compute.manager [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Took 11.35 seconds to spawn the instance on the hypervisor. [ 1770.714980] env[63371]: DEBUG nova.compute.manager [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1770.715786] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-420dab83-f1f8-46f5-94fc-bc3e7c45a72e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.760091] env[63371]: DEBUG oslo_vmware.api [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5220f381-042b-030a-67db-9201718be2c4, 'name': SearchDatastore_Task, 'duration_secs': 0.042171} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.761092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.800236] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.875609] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: cbcdfe1a-86a4-4a12-99b5-44d291d41769] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1770.905785] env[63371]: DEBUG nova.scheduler.client.report [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1771.096326] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774726, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.549075} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.096641] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad/e16e4a55-4198-4308-b12c-d9ac07daecad.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1771.096891] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1771.097164] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ccbb7eb9-6b15-4bd6-8e10-84fe6b850cbd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.104502] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Waiting for the task: (returnval){ [ 1771.104502] env[63371]: value = "task-1774727" [ 1771.104502] env[63371]: _type = "Task" [ 1771.104502] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.112794] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774727, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.177296] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256d87af-c68c-4b9c-a06f-961fca09546c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.188029] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-558b5810-24a9-4dff-901f-7867bb864329 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Suspending the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1771.188029] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-1028bb8a-81e6-44f0-88eb-1c8010bb1069 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.195307] env[63371]: DEBUG oslo_vmware.api [None req-558b5810-24a9-4dff-901f-7867bb864329 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1771.195307] env[63371]: value = "task-1774728" [ 1771.195307] env[63371]: _type = "Task" [ 1771.195307] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.205342] env[63371]: DEBUG oslo_vmware.api [None req-558b5810-24a9-4dff-901f-7867bb864329 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774728, 'name': SuspendVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.235980] env[63371]: INFO nova.compute.manager [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Took 29.59 seconds to build instance. [ 1771.300150] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.380933] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 33cf00ea-3195-41cf-9b7a-a8e64496a122] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1771.411260] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.994s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.413662] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.020s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.415357] env[63371]: INFO nova.compute.claims [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1771.433758] env[63371]: INFO nova.scheduler.client.report [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Deleted allocations for instance 7349ecf6-2de7-4540-b713-7e29cbd3ff0b [ 1771.439160] env[63371]: DEBUG nova.compute.manager [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1771.464441] env[63371]: DEBUG nova.virt.hardware [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=<?>,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-12-11T21:16:28Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1771.464688] env[63371]: DEBUG nova.virt.hardware [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1771.464846] env[63371]: DEBUG nova.virt.hardware [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1771.465047] env[63371]: DEBUG nova.virt.hardware [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1771.465279] env[63371]: DEBUG nova.virt.hardware [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1771.465624] env[63371]: DEBUG nova.virt.hardware [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1771.465707] env[63371]: DEBUG nova.virt.hardware [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1771.465812] env[63371]: DEBUG nova.virt.hardware [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1771.465983] 
env[63371]: DEBUG nova.virt.hardware [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1771.466243] env[63371]: DEBUG nova.virt.hardware [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1771.466320] env[63371]: DEBUG nova.virt.hardware [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1771.467454] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d748a2-2cf7-4939-acda-98541831b018 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.476221] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a50d34c-0a13-488c-a429-14800d540975 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.615112] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774727, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11269} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.615395] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1771.616324] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb42ed98-f6b1-4a77-a061-63788a5e85e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.636923] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad/e16e4a55-4198-4308-b12c-d9ac07daecad.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1771.637137] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2aff442-72e2-49e4-be03-52aca31b9360 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.657489] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Waiting for the task: (returnval){ [ 1771.657489] env[63371]: value = "task-1774729" [ 1771.657489] env[63371]: _type = "Task" [ 1771.657489] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.668117] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774729, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.684817] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "0c8c6997-bec8-4a3b-80cf-cbf35f3843f8" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.685107] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "0c8c6997-bec8-4a3b-80cf-cbf35f3843f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.685319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "0c8c6997-bec8-4a3b-80cf-cbf35f3843f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1771.685501] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "0c8c6997-bec8-4a3b-80cf-cbf35f3843f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1771.685669] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "0c8c6997-bec8-4a3b-80cf-cbf35f3843f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.689854] env[63371]: INFO nova.compute.manager [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Terminating instance [ 1771.691739] env[63371]: DEBUG nova.compute.manager [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1771.691931] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1771.692764] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad3c670-be4a-4e77-983d-846761da7511 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.702258] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1771.702878] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-91dcb08e-8704-4bd0-b6f2-28562ae75700 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.707176] env[63371]: DEBUG oslo_vmware.api [None req-558b5810-24a9-4dff-901f-7867bb864329 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774728, 'name': SuspendVM_Task} progress is 12%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.738334] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58648f62-0a7e-4744-8148-cb0014a6be58 tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Lock "943e2506-03a4-4633-b55b-381d9d8d9ef6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 31.109s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1771.802130] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.824016] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1771.824263] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1771.824439] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleting the datastore file [datastore1] 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1771.824712] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-adabd04c-de16-4fad-b058-a4909b29a4d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.831238] env[63371]: DEBUG oslo_vmware.api [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1771.831238] env[63371]: value = "task-1774731" [ 1771.831238] env[63371]: _type = "Task" [ 1771.831238] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.839486] env[63371]: DEBUG oslo_vmware.api [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774731, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.884300] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 201a2d1e-9e2c-4c07-92be-200408874ad4] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1771.945297] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1acbdb58-95a3-4a3f-aca1-5c4edfbc8567 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "7349ecf6-2de7-4540-b713-7e29cbd3ff0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 18.678s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.052981] env[63371]: DEBUG nova.compute.manager [req-ba9d5e81-8205-4d36-9267-9cc831b96638 req-74aede36-11f6-407a-838d-73a5dff2f69e service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Received event network-vif-plugged-febc3a69-64cf-48c1-8399-147f35d89c61 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1772.053217] env[63371]: DEBUG oslo_concurrency.lockutils [req-ba9d5e81-8205-4d36-9267-9cc831b96638 req-74aede36-11f6-407a-838d-73a5dff2f69e service nova] Acquiring lock "da4839fa-8597-411c-b30c-0ac9226fec1f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.053458] env[63371]: DEBUG oslo_concurrency.lockutils [req-ba9d5e81-8205-4d36-9267-9cc831b96638 req-74aede36-11f6-407a-838d-73a5dff2f69e service nova] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.053760] env[63371]: DEBUG oslo_concurrency.lockutils [req-ba9d5e81-8205-4d36-9267-9cc831b96638 req-74aede36-11f6-407a-838d-73a5dff2f69e service nova] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.053812] env[63371]: DEBUG nova.compute.manager [req-ba9d5e81-8205-4d36-9267-9cc831b96638 req-74aede36-11f6-407a-838d-73a5dff2f69e service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] No waiting events found dispatching network-vif-plugged-febc3a69-64cf-48c1-8399-147f35d89c61 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1772.053992] env[63371]: WARNING nova.compute.manager [req-ba9d5e81-8205-4d36-9267-9cc831b96638 req-74aede36-11f6-407a-838d-73a5dff2f69e service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Received unexpected event network-vif-plugged-febc3a69-64cf-48c1-8399-147f35d89c61 for instance with vm_state building and task_state spawning. [ 1772.169573] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774729, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.193108] env[63371]: DEBUG nova.network.neutron [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Successfully updated port: febc3a69-64cf-48c1-8399-147f35d89c61 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1772.206716] env[63371]: DEBUG oslo_vmware.api [None req-558b5810-24a9-4dff-901f-7867bb864329 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774728, 'name': SuspendVM_Task} progress is 58%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.301686] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.340471] env[63371]: DEBUG oslo_vmware.api [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774731, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17791} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.340746] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1772.340930] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1772.341118] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1772.341292] env[63371]: INFO nova.compute.manager [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Took 0.65 seconds to destroy the instance on the hypervisor. [ 1772.341556] env[63371]: DEBUG oslo.service.loopingcall [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1772.341765] env[63371]: DEBUG nova.compute.manager [-] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1772.341880] env[63371]: DEBUG nova.network.neutron [-] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1772.387541] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: f7c6e1f1-8fa5-40b8-abcb-56944d1c13ed] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1772.672377] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774729, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.696110] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "refresh_cache-da4839fa-8597-411c-b30c-0ac9226fec1f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.696267] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "refresh_cache-da4839fa-8597-411c-b30c-0ac9226fec1f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.696413] env[63371]: DEBUG nova.network.neutron [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1772.709224] env[63371]: DEBUG oslo_vmware.api [None req-558b5810-24a9-4dff-901f-7867bb864329 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774728, 'name': SuspendVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.806268] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.811215] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cd10ad-1f77-4388-928c-260df923cb10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.818423] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e5fa34-652e-4ea9-a20b-2a18c2e8c93d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.849298] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c41f31b-f352-4ef5-9acd-c123ac9e32d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.857021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac1717f-d566-4df4-9aef-6294b8e67964 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.871355] env[63371]: DEBUG nova.compute.provider_tree [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1772.890914] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 47c1c242-d190-4523-8033-307c5a9b7535] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1773.040034] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "f391d4f3-6e9d-4ddc-918a-8dc8581dfc00" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.042495] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "f391d4f3-6e9d-4ddc-918a-8dc8581dfc00" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.042495] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "f391d4f3-6e9d-4ddc-918a-8dc8581dfc00-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.042495] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "f391d4f3-6e9d-4ddc-918a-8dc8581dfc00-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.042495] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "f391d4f3-6e9d-4ddc-918a-8dc8581dfc00-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.043757] env[63371]: INFO nova.compute.manager [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Terminating instance [ 1773.045228] env[63371]: DEBUG nova.compute.manager [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1773.046039] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1773.046355] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-794b60e3-babd-4e9f-99f0-eb97e663b7b6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.056026] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1773.056279] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcb7681d-3da1-4cee-b740-b5e420d03cfc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.062490] env[63371]: DEBUG oslo_vmware.api [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1773.062490] env[63371]: value = "task-1774732" [ 1773.062490] env[63371]: _type = "Task" [ 1773.062490] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.072953] env[63371]: DEBUG oslo_vmware.api [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774732, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.117655] env[63371]: DEBUG nova.network.neutron [-] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1773.169439] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774729, 'name': ReconfigVM_Task, 'duration_secs': 1.207973} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.169724] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Reconfigured VM instance instance-0000005f to attach disk [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad/e16e4a55-4198-4308-b12c-d9ac07daecad.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1773.170367] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05f3f555-abba-4439-aef0-e6af64c06100 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.177152] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Waiting for the task: (returnval){ [ 1773.177152] env[63371]: value = "task-1774733" [ 1773.177152] env[63371]: _type = "Task" [ 1773.177152] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.191997] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774733, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.209957] env[63371]: DEBUG oslo_vmware.api [None req-558b5810-24a9-4dff-901f-7867bb864329 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774728, 'name': SuspendVM_Task, 'duration_secs': 1.513491} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.210919] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-558b5810-24a9-4dff-901f-7867bb864329 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Suspended the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1773.210919] env[63371]: DEBUG nova.compute.manager [None req-558b5810-24a9-4dff-901f-7867bb864329 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1773.211671] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b88ea3b-e1b8-485a-972f-e3b0a45c4838 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.250172] env[63371]: DEBUG nova.network.neutron [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1773.302727] env[63371]: DEBUG oslo_vmware.api [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774717, 'name': ReconfigVM_Task, 'duration_secs': 6.117949} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.305239] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1773.305458] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Reconfigured VM to detach interface {{(pid=63371) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1773.392417] env[63371]: ERROR nova.scheduler.client.report [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [req-57775505-5b68-4b31-af6b-58aa4d05be42] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-57775505-5b68-4b31-af6b-58aa4d05be42"}]} [ 1773.394961] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ca53accc-a15f-4503-87e5-7cbf3e2c0b43] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 1773.414019] env[63371]: DEBUG nova.scheduler.client.report [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1773.428781] env[63371]: DEBUG nova.scheduler.client.report [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1773.429102] env[63371]: DEBUG nova.compute.provider_tree [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1773.432818] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Acquiring lock "943e2506-03a4-4633-b55b-381d9d8d9ef6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.433101] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Lock "943e2506-03a4-4633-b55b-381d9d8d9ef6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.433349] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 
tempest-ServerTagsTestJSON-2007345903-project-member] Acquiring lock "943e2506-03a4-4633-b55b-381d9d8d9ef6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.433588] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Lock "943e2506-03a4-4633-b55b-381d9d8d9ef6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1773.433832] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Lock "943e2506-03a4-4633-b55b-381d9d8d9ef6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1773.436633] env[63371]: INFO nova.compute.manager [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Terminating instance [ 1773.438366] env[63371]: DEBUG nova.compute.manager [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1773.438550] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1773.439452] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa111627-6ffa-41ce-9359-cdd2d05a178e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.443092] env[63371]: DEBUG nova.scheduler.client.report [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1773.449911] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1773.450170] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee037168-486a-4cba-af13-ac7335ecdef2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.456639] env[63371]: DEBUG oslo_vmware.api [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Waiting for the task: (returnval){ [ 1773.456639] env[63371]: value = "task-1774734" [ 1773.456639] env[63371]: _type = "Task" [ 1773.456639] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.462707] env[63371]: DEBUG nova.scheduler.client.report [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1773.467955] env[63371]: DEBUG oslo_vmware.api [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774734, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.477617] env[63371]: DEBUG nova.network.neutron [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Updating instance_info_cache with network_info: [{"id": "febc3a69-64cf-48c1-8399-147f35d89c61", "address": "fa:16:3e:49:d0:b8", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfebc3a69-64", "ovs_interfaceid": "febc3a69-64cf-48c1-8399-147f35d89c61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1773.576707] env[63371]: DEBUG oslo_vmware.api [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774732, 'name': PowerOffVM_Task, 'duration_secs': 0.225697} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.579373] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1773.579719] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1773.580679] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d11c1dc-9768-485f-8ce2-057c7660ad53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.620942] env[63371]: INFO nova.compute.manager [-] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Took 1.28 seconds to deallocate network for instance. 
[ 1773.668734] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1773.668894] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1773.669094] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Deleting the datastore file [datastore1] f391d4f3-6e9d-4ddc-918a-8dc8581dfc00 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1773.669352] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6928db10-bf74-48fa-8841-f2f284323170 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.678522] env[63371]: DEBUG oslo_vmware.api [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1773.678522] env[63371]: value = "task-1774736" [ 1773.678522] env[63371]: _type = "Task" [ 1773.678522] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.696131] env[63371]: DEBUG oslo_vmware.api [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774736, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.699288] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774733, 'name': Rename_Task, 'duration_secs': 0.265892} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.702617] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1773.703724] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0e3dfe16-b431-497f-8988-114e321835f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.712663] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Waiting for the task: (returnval){ [ 1773.712663] env[63371]: value = "task-1774737" [ 1773.712663] env[63371]: _type = "Task" [ 1773.712663] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.728387] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774737, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.781037] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72aa24ae-79d1-4567-94b2-7b5e97d363f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.790034] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8629b4-4177-44a6-ae58-5f8d32c1e1ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.825708] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c3a4ab-9b41-4fa2-9390-8c93757ccc9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.833979] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b511dc-9310-481b-b9be-eb20443d5512 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.848175] env[63371]: DEBUG nova.compute.provider_tree [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1773.901303] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task 
ComputeManager._cleanup_incomplete_migrations {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1773.901496] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances with incomplete migration {{(pid=63371) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 1773.966978] env[63371]: DEBUG oslo_vmware.api [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774734, 'name': PowerOffVM_Task, 'duration_secs': 0.239355} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.967355] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1773.967561] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1773.967881] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-299d5337-607f-4d6d-a51f-4f159479380f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.981781] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "refresh_cache-da4839fa-8597-411c-b30c-0ac9226fec1f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1773.982224] env[63371]: DEBUG nova.compute.manager [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Instance network_info: |[{"id": "febc3a69-64cf-48c1-8399-147f35d89c61", "address": "fa:16:3e:49:d0:b8", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfebc3a69-64", "ovs_interfaceid": "febc3a69-64cf-48c1-8399-147f35d89c61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1773.982751] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:d0:b8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'febc3a69-64cf-48c1-8399-147f35d89c61', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1773.990582] env[63371]: DEBUG oslo.service.loopingcall [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1773.990826] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1773.991082] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37628dd5-1d84-41dc-82f8-f5d6807a778f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.013533] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1774.013533] env[63371]: value = "task-1774739" [ 1774.013533] env[63371]: _type = "Task" [ 1774.013533] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.021446] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774739, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.049469] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1774.050462] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1774.050462] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Deleting the datastore file [datastore1] 943e2506-03a4-4633-b55b-381d9d8d9ef6 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1774.050462] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c344816f-20f7-45fb-8fe9-9f574ac6d523 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.058921] env[63371]: DEBUG oslo_vmware.api [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Waiting for the task: (returnval){ [ 1774.058921] env[63371]: value = "task-1774740" [ 1774.058921] env[63371]: _type = "Task" [ 1774.058921] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.067640] env[63371]: DEBUG oslo_vmware.api [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774740, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.129552] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.193146] env[63371]: DEBUG oslo_vmware.api [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774736, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.302713} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.193402] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1774.193586] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1774.193806] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1774.194055] env[63371]: INFO nova.compute.manager [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1774.194347] env[63371]: DEBUG oslo.service.loopingcall [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1774.194552] env[63371]: DEBUG nova.compute.manager [-] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1774.194647] env[63371]: DEBUG nova.network.neutron [-] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1774.199983] env[63371]: DEBUG nova.compute.manager [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Received event network-changed-febc3a69-64cf-48c1-8399-147f35d89c61 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1774.200216] env[63371]: DEBUG nova.compute.manager [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Refreshing instance network info cache due to event network-changed-febc3a69-64cf-48c1-8399-147f35d89c61. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1774.200497] env[63371]: DEBUG oslo_concurrency.lockutils [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] Acquiring lock "refresh_cache-da4839fa-8597-411c-b30c-0ac9226fec1f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.200722] env[63371]: DEBUG oslo_concurrency.lockutils [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] Acquired lock "refresh_cache-da4839fa-8597-411c-b30c-0ac9226fec1f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.200926] env[63371]: DEBUG nova.network.neutron [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Refreshing network info cache for port febc3a69-64cf-48c1-8399-147f35d89c61 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1774.226439] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774737, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.382951] env[63371]: DEBUG nova.scheduler.client.report [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 144 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1774.383244] env[63371]: DEBUG nova.compute.provider_tree [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 144 to 145 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1774.383420] env[63371]: DEBUG nova.compute.provider_tree [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1774.405146] env[63371]: DEBUG oslo_service.periodic_task [None 
req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1774.524267] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774739, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.568633] env[63371]: DEBUG oslo_vmware.api [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Task: {'id': task-1774740, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.270569} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.568915] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1774.569174] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1774.569388] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1774.569582] env[63371]: INFO nova.compute.manager [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1774.569827] env[63371]: DEBUG oslo.service.loopingcall [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1774.570032] env[63371]: DEBUG nova.compute.manager [-] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1774.570132] env[63371]: DEBUG nova.network.neutron [-] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1774.602568] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.602833] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.603080] env[63371]: DEBUG nova.network.neutron [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1774.727199] env[63371]: DEBUG oslo_vmware.api [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Task: {'id': task-1774737, 'name': PowerOnVM_Task, 'duration_secs': 0.616264} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.727517] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1774.727664] env[63371]: DEBUG nova.compute.manager [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1774.728474] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2a3021-3db1-4cea-87c3-1031f4cbe574 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.889283] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.475s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.889820] env[63371]: DEBUG nova.compute.manager [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1774.893463] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.078s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.893646] env[63371]: DEBUG nova.objects.instance [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63371) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1775.026342] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774739, 'name': CreateVM_Task, 'duration_secs': 0.776601} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.026675] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1775.027456] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.027785] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.028416] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1775.028862] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47d71a6c-5167-473c-983b-967fcf4cf2b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.034999] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1775.034999] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522b9e25-4120-b567-6cf5-d540372bc7f0" [ 1775.034999] env[63371]: _type = "Task" [ 1775.034999] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.042887] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522b9e25-4120-b567-6cf5-d540372bc7f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.124417] env[63371]: DEBUG nova.network.neutron [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Updated VIF entry in instance network info cache for port febc3a69-64cf-48c1-8399-147f35d89c61. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1775.124513] env[63371]: DEBUG nova.network.neutron [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Updating instance_info_cache with network_info: [{"id": "febc3a69-64cf-48c1-8399-147f35d89c61", "address": "fa:16:3e:49:d0:b8", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfebc3a69-64", "ovs_interfaceid": "febc3a69-64cf-48c1-8399-147f35d89c61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.199467] env[63371]: INFO nova.compute.manager [None req-a644c729-96f1-413d-9b77-3d54bd185ecb tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Resuming [ 1775.200650] env[63371]: DEBUG nova.objects.instance [None req-a644c729-96f1-413d-9b77-3d54bd185ecb tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lazy-loading 'flavor' on Instance uuid 9985dbcd-4498-4629-aae5-5e1933307c50 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1775.246406] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.259025] env[63371]: DEBUG nova.network.neutron [-] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.265672] env[63371]: DEBUG nova.compute.manager [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Received event network-vif-deleted-4590c30b-effd-423f-b0b2-c208bbdfffd7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1775.265672] env[63371]: INFO nova.compute.manager [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Neutron deleted interface 4590c30b-effd-423f-b0b2-c208bbdfffd7; detaching it from the instance and 
deleting it from the info cache [ 1775.265672] env[63371]: DEBUG nova.network.neutron [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updating instance_info_cache with network_info: [{"id": "bc8b891d-040a-4a55-a281-311c08ae828d", "address": "fa:16:3e:ea:27:0c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8b891d-04", "ovs_interfaceid": "bc8b891d-040a-4a55-a281-311c08ae828d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "57835801-cbba-4176-8f6b-8d0ec76aa66e", "address": "fa:16:3e:29:75:d5", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap57835801-cb", "ovs_interfaceid": "57835801-cbba-4176-8f6b-8d0ec76aa66e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.398383] env[63371]: DEBUG nova.compute.utils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1775.402609] env[63371]: DEBUG nova.compute.manager [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1775.402881] env[63371]: DEBUG nova.network.neutron [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1775.408560] env[63371]: INFO nova.network.neutron [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Port 57835801-cbba-4176-8f6b-8d0ec76aa66e from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1775.408560] env[63371]: INFO nova.network.neutron [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Port 4590c30b-effd-423f-b0b2-c208bbdfffd7 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1775.408560] env[63371]: DEBUG nova.network.neutron [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updating instance_info_cache with network_info: [{"id": "bc8b891d-040a-4a55-a281-311c08ae828d", "address": "fa:16:3e:ea:27:0c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8b891d-04", "ovs_interfaceid": "bc8b891d-040a-4a55-a281-311c08ae828d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.450033] env[63371]: DEBUG nova.network.neutron [-] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.546172] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522b9e25-4120-b567-6cf5-d540372bc7f0, 'name': SearchDatastore_Task, 'duration_secs': 0.0111} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.546520] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.546759] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1775.546991] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.547159] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.547358] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1775.547609] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68bfd86f-119a-4aa1-87df-6ecfa1bd4963 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.557033] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1775.557224] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1775.557930] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e95098d0-4558-469c-927e-899d5a29899b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.565078] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1775.565078] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a54b68-6a80-5140-2df2-b815e1617d85" [ 1775.565078] env[63371]: _type = "Task" [ 1775.565078] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.572612] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a54b68-6a80-5140-2df2-b815e1617d85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.616216] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquiring lock "e16e4a55-4198-4308-b12c-d9ac07daecad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.616216] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Lock "e16e4a55-4198-4308-b12c-d9ac07daecad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.616342] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquiring lock "e16e4a55-4198-4308-b12c-d9ac07daecad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.616515] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Lock "e16e4a55-4198-4308-b12c-d9ac07daecad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.616679] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Lock "e16e4a55-4198-4308-b12c-d9ac07daecad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.618897] env[63371]: INFO nova.compute.manager [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Terminating instance [ 1775.620583] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquiring lock "refresh_cache-e16e4a55-4198-4308-b12c-d9ac07daecad" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.620741] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquired lock "refresh_cache-e16e4a55-4198-4308-b12c-d9ac07daecad" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.620910] env[63371]: DEBUG nova.network.neutron [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1775.627464] env[63371]: DEBUG oslo_concurrency.lockutils [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] Releasing lock "refresh_cache-da4839fa-8597-411c-b30c-0ac9226fec1f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.627691] env[63371]: DEBUG nova.compute.manager [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Received event network-vif-deleted-c6418174-b2f5-4848-bc28-4fc4fc2fb439 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1775.627875] env[63371]: DEBUG nova.compute.manager [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Received event network-vif-deleted-57835801-cbba-4176-8f6b-8d0ec76aa66e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1775.628051] env[63371]: INFO nova.compute.manager [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Neutron deleted interface 57835801-cbba-4176-8f6b-8d0ec76aa66e; detaching it from the instance and deleting it from the info cache [ 1775.628337] env[63371]: DEBUG nova.network.neutron [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updating instance_info_cache with network_info: [{"id": "bc8b891d-040a-4a55-a281-311c08ae828d", "address": "fa:16:3e:ea:27:0c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": 
"10.180.180.231", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc8b891d-04", "ovs_interfaceid": "bc8b891d-040a-4a55-a281-311c08ae828d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4590c30b-effd-423f-b0b2-c208bbdfffd7", "address": "fa:16:3e:dd:c9:8c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4590c30b-ef", "ovs_interfaceid": "4590c30b-effd-423f-b0b2-c208bbdfffd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.723390] env[63371]: DEBUG nova.policy [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd36c9a2cfcbd49ffbedf01cc9b2fbc1d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '40f26a0147d245e59fa8a860280852e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1775.762957] env[63371]: INFO nova.compute.manager [-] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Took 1.57 seconds to deallocate network for instance. 
[ 1775.769580] env[63371]: DEBUG oslo_concurrency.lockutils [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Acquiring lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1775.769753] env[63371]: DEBUG oslo_concurrency.lockutils [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Acquired lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1775.770643] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdccf820-1cd2-4580-82b6-6b5fb477c086 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.795392] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "d00602b9-16bf-4c11-bc47-6076dddbf159" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.795938] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a459605-758c-499f-9950-d4060333fdbc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.831216] env[63371]: DEBUG nova.virt.vmwareapi.vmops [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Reconfiguring VM to detach interface {{(pid=63371) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1775.832192] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-137c2201-9352-47b3-9f5a-7a090e03f32a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.852454] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Waiting for the task: (returnval){ [ 1775.852454] env[63371]: value = "task-1774741" [ 1775.852454] env[63371]: _type = "Task" [ 1775.852454] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.861951] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Task: {'id': task-1774741, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.903550] env[63371]: DEBUG nova.compute.manager [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1775.911139] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3d03a97f-89c6-4900-b21e-122bc23704e8 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1775.911774] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1775.913756] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.498s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1775.915402] env[63371]: INFO nova.compute.claims [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1775.954055] env[63371]: INFO nova.compute.manager [-] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Took 1.38 seconds to deallocate network for instance. [ 1776.078383] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a54b68-6a80-5140-2df2-b815e1617d85, 'name': SearchDatastore_Task, 'duration_secs': 0.016175} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.079497] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88641cbc-620e-42ad-b568-66cb528dd13b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.084389] env[63371]: DEBUG nova.network.neutron [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Successfully created port: dd28ee6f-5efa-4009-842b-c1c9af10f8ea {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1776.087895] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1776.087895] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f8f3f7-7f7a-58c7-9edb-5b5e0c3bb0f3" [ 1776.087895] env[63371]: _type = "Task" [ 1776.087895] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.099080] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f8f3f7-7f7a-58c7-9edb-5b5e0c3bb0f3, 'name': SearchDatastore_Task, 'duration_secs': 0.010235} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.099396] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.099691] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] da4839fa-8597-411c-b30c-0ac9226fec1f/da4839fa-8597-411c-b30c-0ac9226fec1f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1776.099981] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d0be365-92e1-46d9-88f9-92e7788dc3b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.109755] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1776.109755] env[63371]: value = "task-1774742" [ 1776.109755] env[63371]: _type = "Task" [ 1776.109755] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.114880] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774742, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.131191] env[63371]: DEBUG oslo_concurrency.lockutils [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] Acquiring lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.149776] env[63371]: DEBUG nova.network.neutron [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1776.209546] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a644c729-96f1-413d-9b77-3d54bd185ecb tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.209701] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a644c729-96f1-413d-9b77-3d54bd185ecb tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquired lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1776.209870] env[63371]: DEBUG nova.network.neutron [None req-a644c729-96f1-413d-9b77-3d54bd185ecb tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1776.246840] env[63371]: DEBUG nova.network.neutron [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.278221] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1776.368393] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Task: {'id': task-1774741, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.421117] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f906162a-ff3b-4037-a300-49b7faafec9b tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-d00602b9-16bf-4c11-bc47-6076dddbf159-57835801-cbba-4176-8f6b-8d0ec76aa66e" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.226s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1776.464384] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1776.483727] env[63371]: DEBUG nova.compute.manager [req-97ebb552-0a84-4d4c-9d89-17ed1758d82b req-1185d1e5-0755-457b-99e7-1a9c821c99f9 service nova] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Received event network-vif-deleted-8667cc0b-44ea-4ae9-8bf4-7ff37a4a7c92 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1776.484140] env[63371]: DEBUG nova.compute.manager [req-97ebb552-0a84-4d4c-9d89-17ed1758d82b req-1185d1e5-0755-457b-99e7-1a9c821c99f9 service nova] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Received event network-vif-deleted-5d79ab9c-2d92-460b-818d-59416391cc29 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1776.617611] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774742, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476921} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.617878] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] da4839fa-8597-411c-b30c-0ac9226fec1f/da4839fa-8597-411c-b30c-0ac9226fec1f.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1776.618152] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1776.618413] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0928371-206b-427e-9fda-878772407888 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.625596] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1776.625596] env[63371]: value = "task-1774743" [ 1776.625596] env[63371]: _type = "Task" [ 1776.625596] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.633317] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774743, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.749553] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Releasing lock "refresh_cache-e16e4a55-4198-4308-b12c-d9ac07daecad" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.750031] env[63371]: DEBUG nova.compute.manager [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1776.750231] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1776.751212] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c96785-0c1f-44a0-b56a-d0b0e7cd6600 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.761016] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1776.761296] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef19822e-a581-4641-8ec8-806837bf0969 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.767166] env[63371]: DEBUG oslo_vmware.api [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1776.767166] env[63371]: value = "task-1774744" [ 1776.767166] env[63371]: _type = "Task" [ 1776.767166] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.775155] env[63371]: DEBUG oslo_vmware.api [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774744, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.863055] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Task: {'id': task-1774741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.918179] env[63371]: DEBUG nova.compute.manager [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1776.957878] env[63371]: DEBUG nova.virt.hardware [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1776.957878] env[63371]: DEBUG nova.virt.hardware [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1776.957878] env[63371]: DEBUG nova.virt.hardware [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1776.960071] env[63371]: DEBUG nova.virt.hardware [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1776.962126] env[63371]: DEBUG nova.virt.hardware [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1776.963043] env[63371]: DEBUG nova.virt.hardware [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1776.963043] env[63371]: DEBUG nova.virt.hardware [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1776.963043] env[63371]: DEBUG nova.virt.hardware [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1776.963043] env[63371]: DEBUG nova.virt.hardware [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1776.963212] env[63371]: DEBUG nova.virt.hardware [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1776.963280] env[63371]: DEBUG nova.virt.hardware [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1776.964227] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c87eec8-3e64-40bc-8def-b35032e893b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.978137] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3aa249-c992-4a17-b283-98950a9ab598 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.985283] env[63371]: DEBUG nova.network.neutron [None req-a644c729-96f1-413d-9b77-3d54bd185ecb tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Updating instance_info_cache with network_info: [{"id": "d3f41a80-52de-46a5-ac15-9a26e6710908", "address": "fa:16:3e:f6:cd:6b", "network": {"id": "9c25e5e9-468d-4d4c-93e0-c9815eff1c2e", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-814005109-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2e693d73d70140c2ba065de2b60838c2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7514a465-f1a4-4a8b-b76b-726b1a9d7e2f", "external-id": "nsx-vlan-transportzone-36", "segmentation_id": 36, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd3f41a80-52", "ovs_interfaceid": "d3f41a80-52de-46a5-ac15-9a26e6710908", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.134765] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774743, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080089} 
completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.134938] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1777.137684] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecca5e0-af89-47d8-8793-037c0820e569 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.160370] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] da4839fa-8597-411c-b30c-0ac9226fec1f/da4839fa-8597-411c-b30c-0ac9226fec1f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1777.162998] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67c45065-ebe7-4e4e-b6c9-4edd8ac4fa9e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.182559] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1777.182559] env[63371]: value = "task-1774745" [ 1777.182559] env[63371]: _type = "Task" [ 1777.182559] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.191986] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774745, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.244966] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e448dc-3abe-4e25-b33f-67dd9152ccdc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.252375] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-423aee65-50ca-43b5-a45a-9db5f55c5e16 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.285638] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-069755a3-d039-4977-b140-b793de1e3727 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.296063] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd0a62b-c57b-4242-9d83-336bfc77ad45 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.299814] env[63371]: DEBUG oslo_vmware.api [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774744, 'name': PowerOffVM_Task, 'duration_secs': 0.187043} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.300095] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1777.300326] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1777.300912] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01cd334c-80d3-41a7-9260-1b149911b931 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.313669] env[63371]: DEBUG nova.compute.provider_tree [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1777.324698] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1777.324698] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] 
Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1777.324698] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Deleting the datastore file [datastore1] e16e4a55-4198-4308-b12c-d9ac07daecad {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1777.324828] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8460b788-e460-4a63-aca5-fb62b97f5e64 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.332030] env[63371]: DEBUG oslo_vmware.api [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for the task: (returnval){ [ 1777.332030] env[63371]: value = "task-1774747" [ 1777.332030] env[63371]: _type = "Task" [ 1777.332030] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.340017] env[63371]: DEBUG oslo_vmware.api [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774747, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.365197] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Task: {'id': task-1774741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.488215] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a644c729-96f1-413d-9b77-3d54bd185ecb tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Releasing lock "refresh_cache-9985dbcd-4498-4629-aae5-5e1933307c50" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.489273] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4081f914-5e59-451e-962c-25705d0f7c4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.496297] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a644c729-96f1-413d-9b77-3d54bd185ecb tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Resuming the VM {{(pid=63371) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1777.497021] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f644c0df-e313-439f-8ead-d9b1c5c6f154 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.506352] env[63371]: DEBUG oslo_vmware.api [None req-a644c729-96f1-413d-9b77-3d54bd185ecb tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1777.506352] env[63371]: value = "task-1774748" [ 1777.506352] env[63371]: _type = "Task" [ 1777.506352] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.515572] env[63371]: DEBUG oslo_vmware.api [None req-a644c729-96f1-413d-9b77-3d54bd185ecb tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774748, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.555855] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.557046] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.667451] env[63371]: DEBUG nova.compute.manager [req-bde0b16d-0911-4ef6-9d96-1aff82270f19 req-aea7e78f-9c5e-477f-bc85-26f7a4201726 service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Received event network-vif-plugged-dd28ee6f-5efa-4009-842b-c1c9af10f8ea {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1777.667699] env[63371]: DEBUG oslo_concurrency.lockutils [req-bde0b16d-0911-4ef6-9d96-1aff82270f19 req-aea7e78f-9c5e-477f-bc85-26f7a4201726 service nova] Acquiring lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.667907] env[63371]: DEBUG oslo_concurrency.lockutils [req-bde0b16d-0911-4ef6-9d96-1aff82270f19 req-aea7e78f-9c5e-477f-bc85-26f7a4201726 service nova] Lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.668097] env[63371]: DEBUG oslo_concurrency.lockutils [req-bde0b16d-0911-4ef6-9d96-1aff82270f19 req-aea7e78f-9c5e-477f-bc85-26f7a4201726 service nova] Lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.668279] env[63371]: DEBUG nova.compute.manager [req-bde0b16d-0911-4ef6-9d96-1aff82270f19 req-aea7e78f-9c5e-477f-bc85-26f7a4201726 service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] No waiting events found dispatching network-vif-plugged-dd28ee6f-5efa-4009-842b-c1c9af10f8ea {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1777.668444] env[63371]: WARNING nova.compute.manager [req-bde0b16d-0911-4ef6-9d96-1aff82270f19 req-aea7e78f-9c5e-477f-bc85-26f7a4201726 service nova] [instance: 
3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Received unexpected event network-vif-plugged-dd28ee6f-5efa-4009-842b-c1c9af10f8ea for instance with vm_state building and task_state spawning. [ 1777.693100] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774745, 'name': ReconfigVM_Task, 'duration_secs': 0.297341} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.693388] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Reconfigured VM instance instance-00000062 to attach disk [datastore1] da4839fa-8597-411c-b30c-0ac9226fec1f/da4839fa-8597-411c-b30c-0ac9226fec1f.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1777.694057] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-110667b6-bdfa-42bd-9b80-54226d228f19 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.700880] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1777.700880] env[63371]: value = "task-1774749" [ 1777.700880] env[63371]: _type = "Task" [ 1777.700880] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.710426] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774749, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.806418] env[63371]: DEBUG nova.network.neutron [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Successfully updated port: dd28ee6f-5efa-4009-842b-c1c9af10f8ea {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1777.816585] env[63371]: DEBUG nova.scheduler.client.report [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1777.843167] env[63371]: DEBUG oslo_vmware.api [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Task: {'id': task-1774747, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.089966} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.843422] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1777.843613] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1777.843884] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1777.844092] env[63371]: INFO nova.compute.manager [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1777.844337] env[63371]: DEBUG oslo.service.loopingcall [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1777.844545] env[63371]: DEBUG nova.compute.manager [-] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1777.844639] env[63371]: DEBUG nova.network.neutron [-] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1777.862882] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Task: {'id': task-1774741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.863623] env[63371]: DEBUG nova.network.neutron [-] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1778.014064] env[63371]: DEBUG oslo_vmware.api [None req-a644c729-96f1-413d-9b77-3d54bd185ecb tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774748, 'name': PowerOnVM_Task, 'duration_secs': 0.486889} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.014293] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a644c729-96f1-413d-9b77-3d54bd185ecb tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Resumed the VM {{(pid=63371) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1778.014493] env[63371]: DEBUG nova.compute.manager [None req-a644c729-96f1-413d-9b77-3d54bd185ecb tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1778.016044] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa5bcc2-cd74-4cb3-aad7-b70fd95dd733 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.058619] env[63371]: DEBUG nova.compute.manager [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1778.213893] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774749, 'name': Rename_Task, 'duration_secs': 0.180895} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.214210] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1778.214477] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-960e252e-751d-4f74-ba26-f0bb91699152 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.226040] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1778.226040] env[63371]: value = "task-1774750" [ 1778.226040] env[63371]: _type = "Task" [ 1778.226040] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.237984] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774750, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.309110] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquiring lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1778.309264] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquired lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1778.309605] env[63371]: DEBUG nova.network.neutron [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1778.323512] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.323512] env[63371]: DEBUG nova.compute.manager [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1778.325697] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.782s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1778.329047] env[63371]: INFO nova.compute.claims [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1778.374444] env[63371]: DEBUG nova.network.neutron [-] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1778.376538] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Task: {'id': task-1774741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.583411] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.734848] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774750, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.832666] env[63371]: DEBUG nova.compute.utils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1778.834478] env[63371]: DEBUG nova.compute.manager [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1778.834684] env[63371]: DEBUG nova.network.neutron [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1778.862957] env[63371]: DEBUG nova.network.neutron [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1778.868526] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Task: {'id': task-1774741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.880041] env[63371]: INFO nova.compute.manager [-] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Took 1.03 seconds to deallocate network for instance. [ 1778.886856] env[63371]: DEBUG nova.policy [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '374f8ba08f014d1799d6488ca91bbed1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1e726c7c7ed444e9b106c8b9c1c86835', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1779.065051] env[63371]: DEBUG nova.network.neutron [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Updating instance_info_cache with network_info: [{"id": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "address": "fa:16:3e:d1:7d:4b", "network": {"id": "1f2162b2-ad92-4b6d-9e1c-02a0d65dd460", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-529696742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40f26a0147d245e59fa8a860280852e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd28ee6f-5e", "ovs_interfaceid": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1779.211656] env[63371]: DEBUG nova.network.neutron [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Successfully created port: d94a7cdb-218d-45c4-98f7-d395d584d9c7 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1779.236083] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774750, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.337331] env[63371]: DEBUG nova.compute.manager [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1779.370546] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Task: {'id': task-1774741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.385475] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.569537] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Releasing lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.569846] env[63371]: DEBUG nova.compute.manager [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Instance network_info: |[{"id": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "address": "fa:16:3e:d1:7d:4b", "network": {"id": "1f2162b2-ad92-4b6d-9e1c-02a0d65dd460", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-529696742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40f26a0147d245e59fa8a860280852e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", 
"external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd28ee6f-5e", "ovs_interfaceid": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1779.570513] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:7d:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04e15990-16e1-4cb2-b0f0-06c362e68c5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dd28ee6f-5efa-4009-842b-c1c9af10f8ea', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1779.578132] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Creating folder: Project (40f26a0147d245e59fa8a860280852e3). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1779.578421] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-094aaed6-918d-40a9-8b3b-980a20d88a0a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.590219] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Created folder: Project (40f26a0147d245e59fa8a860280852e3) in parent group-v368199. [ 1779.590219] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Creating folder: Instances. Parent ref: group-v368468. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1779.590219] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e2448bf7-3303-49e3-9491-59501ff6c9f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.601627] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Created folder: Instances in parent group-v368468. [ 1779.601627] env[63371]: DEBUG oslo.service.loopingcall [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1779.601627] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1779.601627] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42759e74-8d7d-4b79-92b1-8391ae06573d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.630221] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1779.630221] env[63371]: value = "task-1774753" [ 1779.630221] env[63371]: _type = "Task" [ 1779.630221] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.640527] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774753, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.691802] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ffb076-04a1-4c93-9dd4-05e1fe3328f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.699505] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31451b56-4a72-4983-91ef-0a3c418df916 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.733986] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ddf1a56-3d00-468f-a91b-5f57ba959e81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.739710] env[63371]: DEBUG nova.compute.manager [req-17ab6692-ff61-4ac4-9f9a-7cf9f668bd44 req-f308df64-e711-41ff-8597-cf8156f4ae9f service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Received event network-changed-dd28ee6f-5efa-4009-842b-c1c9af10f8ea {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1779.739909] env[63371]: DEBUG nova.compute.manager [req-17ab6692-ff61-4ac4-9f9a-7cf9f668bd44 req-f308df64-e711-41ff-8597-cf8156f4ae9f service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Refreshing instance network info cache due to event network-changed-dd28ee6f-5efa-4009-842b-c1c9af10f8ea. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1779.740141] env[63371]: DEBUG oslo_concurrency.lockutils [req-17ab6692-ff61-4ac4-9f9a-7cf9f668bd44 req-f308df64-e711-41ff-8597-cf8156f4ae9f service nova] Acquiring lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.740308] env[63371]: DEBUG oslo_concurrency.lockutils [req-17ab6692-ff61-4ac4-9f9a-7cf9f668bd44 req-f308df64-e711-41ff-8597-cf8156f4ae9f service nova] Acquired lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.740438] env[63371]: DEBUG nova.network.neutron [req-17ab6692-ff61-4ac4-9f9a-7cf9f668bd44 req-f308df64-e711-41ff-8597-cf8156f4ae9f service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Refreshing network info cache for port dd28ee6f-5efa-4009-842b-c1c9af10f8ea {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1779.746711] env[63371]: DEBUG oslo_vmware.api [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774750, 'name': PowerOnVM_Task, 'duration_secs': 1.310553} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.746897] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1779.747102] env[63371]: INFO nova.compute.manager [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Took 8.31 seconds to spawn the instance on the hypervisor. 
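The PowerOnVM_Task records above follow oslo.vmware's usual invoke-then-poll pattern: the driver issues an asynchronous *_Task SOAP call, receives a Task reference back, and wait_for_task polls that task until the "completed successfully" record appears (the intermediate "progress is N%" lines come from that polling loop). A minimal sketch of the pattern, assuming oslo.vmware's VMwareAPISession API and using hypothetical connection values and a placeholder vm_ref rather than anything taken from this log:

    # Illustrative sketch only; host, credentials, and intervals are made up.
    from oslo_vmware import api as vmware_api

    def power_on(session, vm_ref):
        """Invoke PowerOnVM_Task and block until vCenter finishes it."""
        # invoke_api issues the asynchronous SOAP call; vCenter returns a
        # Task managed-object reference immediately.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task polls the task state (producing the "progress is N%"
        # style records) and raises if vCenter reports the task as failed.
        return session.wait_for_task(task_ref)

    # Hypothetical connection parameters; real values are deployment-specific.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user@example.test', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
    # power_on(session, vm_ref) would then drive the sequence seen above,
    # where vm_ref is a VirtualMachine reference looked up beforehand (the
    # PropertyCollector.RetrievePropertiesEx calls throughout this log).

The same invoke-then-poll sequence accounts for the CreateVM_Task, Rename_Task, ReconfigVM_Task, and DeleteDatastoreFile_Task entries elsewhere in this section.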
[ 1779.747280] env[63371]: DEBUG nova.compute.manager [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1779.748505] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543d2b0c-c563-47a1-a464-ba4cd5d2f71c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.753308] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef651771-d824-489a-b9a3-d35b9c6c65ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.770111] env[63371]: DEBUG nova.compute.provider_tree [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1779.869269] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Task: {'id': task-1774741, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.139533] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774753, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.277925] env[63371]: DEBUG nova.scheduler.client.report [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1780.285862] env[63371]: INFO nova.compute.manager [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Took 23.41 seconds to build instance. [ 1780.348911] env[63371]: DEBUG nova.compute.manager [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1780.368068] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Task: {'id': task-1774741, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.383475] env[63371]: DEBUG nova.virt.hardware [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1780.383764] env[63371]: DEBUG nova.virt.hardware [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1780.383941] env[63371]: DEBUG nova.virt.hardware [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1780.384142] env[63371]: DEBUG nova.virt.hardware [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1780.384288] env[63371]: DEBUG nova.virt.hardware [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1780.384434] env[63371]: DEBUG nova.virt.hardware [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1780.384637] env[63371]: DEBUG nova.virt.hardware [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1780.384858] env[63371]: DEBUG nova.virt.hardware [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1780.385085] env[63371]: DEBUG nova.virt.hardware [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1780.385254] env[63371]: DEBUG nova.virt.hardware [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1780.385429] env[63371]: DEBUG nova.virt.hardware [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1780.386334] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a238db0-36fb-4055-94dd-0644005f6818 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.394793] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b38262-1c2b-4750-927d-4c11a8b9b9e4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.524814] env[63371]: DEBUG nova.network.neutron [req-17ab6692-ff61-4ac4-9f9a-7cf9f668bd44 req-f308df64-e711-41ff-8597-cf8156f4ae9f service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Updated VIF entry in instance network info cache for port dd28ee6f-5efa-4009-842b-c1c9af10f8ea. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1780.525205] env[63371]: DEBUG nova.network.neutron [req-17ab6692-ff61-4ac4-9f9a-7cf9f668bd44 req-f308df64-e711-41ff-8597-cf8156f4ae9f service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Updating instance_info_cache with network_info: [{"id": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "address": "fa:16:3e:d1:7d:4b", "network": {"id": "1f2162b2-ad92-4b6d-9e1c-02a0d65dd460", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-529696742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40f26a0147d245e59fa8a860280852e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd28ee6f-5e", "ovs_interfaceid": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.641624] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774753, 'name': CreateVM_Task, 'duration_secs': 0.510867} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.641923] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1780.642925] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1780.643573] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1780.643754] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1780.644161] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7e11d78-fbf3-4b83-a8b8-42f74d93f9a0 {{(pid=63371) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.650722] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for the task: (returnval){ [ 1780.650722] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52230ba6-c1f6-fafe-5ea8-2348edba6833" [ 1780.650722] env[63371]: _type = "Task" [ 1780.650722] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.658982] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52230ba6-c1f6-fafe-5ea8-2348edba6833, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.784773] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.459s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.785577] env[63371]: DEBUG nova.compute.manager [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1780.789506] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 15.087s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.791017] env[63371]: DEBUG oslo_concurrency.lockutils [None req-78deabbe-1324-4405-9acf-779efb507d15 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.920s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.859089] env[63371]: DEBUG nova.compute.manager [req-9308b1bc-9edd-4bc7-959f-62884fb5fec8 req-d7f79733-3db8-4c8d-a5db-c64f5a814560 service nova] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Received event network-vif-plugged-d94a7cdb-218d-45c4-98f7-d395d584d9c7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1780.859276] env[63371]: DEBUG oslo_concurrency.lockutils [req-9308b1bc-9edd-4bc7-959f-62884fb5fec8 req-d7f79733-3db8-4c8d-a5db-c64f5a814560 service nova] Acquiring lock "6c2edb87-7a36-4814-ac4a-199cdca1ef68-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.859463] env[63371]: DEBUG oslo_concurrency.lockutils [req-9308b1bc-9edd-4bc7-959f-62884fb5fec8 req-d7f79733-3db8-4c8d-a5db-c64f5a814560 service nova] Lock "6c2edb87-7a36-4814-ac4a-199cdca1ef68-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.859631] env[63371]: DEBUG oslo_concurrency.lockutils [req-9308b1bc-9edd-4bc7-959f-62884fb5fec8 req-d7f79733-3db8-4c8d-a5db-c64f5a814560 service nova] Lock "6c2edb87-7a36-4814-ac4a-199cdca1ef68-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.859797] env[63371]: DEBUG nova.compute.manager [req-9308b1bc-9edd-4bc7-959f-62884fb5fec8 req-d7f79733-3db8-4c8d-a5db-c64f5a814560 service nova] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] No waiting events found dispatching network-vif-plugged-d94a7cdb-218d-45c4-98f7-d395d584d9c7 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1780.862571] env[63371]: WARNING nova.compute.manager [req-9308b1bc-9edd-4bc7-959f-62884fb5fec8 req-d7f79733-3db8-4c8d-a5db-c64f5a814560 service nova] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Received unexpected event network-vif-plugged-d94a7cdb-218d-45c4-98f7-d395d584d9c7 for instance with vm_state building and task_state spawning. [ 1780.873704] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Task: {'id': task-1774741, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.925480] env[63371]: DEBUG nova.network.neutron [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Successfully updated port: d94a7cdb-218d-45c4-98f7-d395d584d9c7 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1781.028134] env[63371]: DEBUG oslo_concurrency.lockutils [req-17ab6692-ff61-4ac4-9f9a-7cf9f668bd44 req-f308df64-e711-41ff-8597-cf8156f4ae9f service nova] Releasing lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.163876] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52230ba6-c1f6-fafe-5ea8-2348edba6833, 'name': SearchDatastore_Task, 'duration_secs': 0.009505} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.164211] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.164736] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1781.165055] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.165235] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.165427] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1781.165698] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-4f361752-d7fa-412b-8299-c9874156b0c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.174517] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1781.174701] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1781.175788] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84fcc1e8-babb-44fb-9a98-2054df1687b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.182467] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for the task: (returnval){ [ 1781.182467] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528764bf-d026-5001-ac91-d87a9f8021b4" [ 1781.182467] env[63371]: _type = "Task" [ 1781.182467] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.191610] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528764bf-d026-5001-ac91-d87a9f8021b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.297695] env[63371]: DEBUG nova.compute.utils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1781.302105] env[63371]: DEBUG nova.compute.manager [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1781.302281] env[63371]: DEBUG nova.network.neutron [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1781.355683] env[63371]: DEBUG nova.policy [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '739c32320b904c76ace3302c40b1d627', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5942787fa9584e8fbf5ddd459907ce5d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1781.373423] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Task: {'id': task-1774741, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.430009] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Acquiring lock "refresh_cache-6c2edb87-7a36-4814-ac4a-199cdca1ef68" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.430202] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Acquired lock "refresh_cache-6c2edb87-7a36-4814-ac4a-199cdca1ef68" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.430517] env[63371]: DEBUG nova.network.neutron [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1781.648866] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99de759d-d720-4e8f-96d2-e4ad6269e1c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.652190] env[63371]: DEBUG nova.network.neutron [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Successfully created port: e4ee0c90-4a70-4f4e-b976-34412c13da2f {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1781.659417] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a459bd34-39a9-42ef-a838-8f2d1f223ae8 
{{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.692021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb90b2b8-df0e-47ff-8811-3de959e1e5cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.702585] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a60705a-11c6-4089-ac8c-63213cb7ef8d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.706626] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528764bf-d026-5001-ac91-d87a9f8021b4, 'name': SearchDatastore_Task, 'duration_secs': 0.009546} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.707753] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e8c6e20-2862-4747-97c0-ea7997acf459 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.721308] env[63371]: DEBUG nova.compute.provider_tree [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1781.726179] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for the task: (returnval){ [ 1781.726179] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b6d9d4-976e-c7d4-ad79-96780b52eb91" [ 1781.726179] env[63371]: _type = "Task" [ 1781.726179] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.734179] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b6d9d4-976e-c7d4-ad79-96780b52eb91, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.768627] env[63371]: DEBUG nova.compute.manager [req-9149032d-8656-4578-927c-5f6808c7090f req-5d96b72b-34e2-4300-90df-29ff7f262efa service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Received event network-changed-febc3a69-64cf-48c1-8399-147f35d89c61 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1781.768836] env[63371]: DEBUG nova.compute.manager [req-9149032d-8656-4578-927c-5f6808c7090f req-5d96b72b-34e2-4300-90df-29ff7f262efa service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Refreshing instance network info cache due to event network-changed-febc3a69-64cf-48c1-8399-147f35d89c61. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1781.769262] env[63371]: DEBUG oslo_concurrency.lockutils [req-9149032d-8656-4578-927c-5f6808c7090f req-5d96b72b-34e2-4300-90df-29ff7f262efa service nova] Acquiring lock "refresh_cache-da4839fa-8597-411c-b30c-0ac9226fec1f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.769536] env[63371]: DEBUG oslo_concurrency.lockutils [req-9149032d-8656-4578-927c-5f6808c7090f req-5d96b72b-34e2-4300-90df-29ff7f262efa service nova] Acquired lock "refresh_cache-da4839fa-8597-411c-b30c-0ac9226fec1f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.769638] env[63371]: DEBUG nova.network.neutron [req-9149032d-8656-4578-927c-5f6808c7090f req-5d96b72b-34e2-4300-90df-29ff7f262efa service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Refreshing network info cache for port febc3a69-64cf-48c1-8399-147f35d89c61 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1781.802276] env[63371]: DEBUG nova.compute.manager [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1781.873936] env[63371]: DEBUG oslo_vmware.api [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Task: {'id': task-1774741, 'name': ReconfigVM_Task, 'duration_secs': 5.851335} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.874405] env[63371]: DEBUG oslo_concurrency.lockutils [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] Releasing lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.874744] env[63371]: DEBUG nova.virt.vmwareapi.vmops [req-0086713a-5a6b-44d0-b19c-afe95aee8d3d req-4a95e071-fc6f-4ea6-97d8-5448fe6f01a7 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Reconfigured VM to detach interface {{(pid=63371) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1781.875367] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "d00602b9-16bf-4c11-bc47-6076dddbf159" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.081s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.875725] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "d00602b9-16bf-4c11-bc47-6076dddbf159-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1781.876231] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 
tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "d00602b9-16bf-4c11-bc47-6076dddbf159-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1781.877815] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "d00602b9-16bf-4c11-bc47-6076dddbf159-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1781.879509] env[63371]: INFO nova.compute.manager [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Terminating instance [ 1781.881537] env[63371]: DEBUG nova.compute.manager [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1781.881842] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1781.882839] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27c6dfb-9d06-4cc9-bc17-756fce1e25b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.892627] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1781.892627] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6fde0409-e254-437e-ba64-39e55568b4e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.901019] env[63371]: DEBUG oslo_vmware.api [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1781.901019] env[63371]: value = "task-1774754" [ 1781.901019] env[63371]: _type = "Task" [ 1781.901019] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.908095] env[63371]: DEBUG oslo_vmware.api [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774754, 'name': PowerOffVM_Task} progress is 0%. 
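The "Waiting for the task ... to complete" / "progress is 0%" pairs above are oslo.vmware's task-polling loop around a vCenter task. A hedged sketch of that pattern for the PowerOffVM_Task case, assuming placeholder vCenter credentials and a vm_ref obtained elsewhere (a simplified illustration, not nova.virt.vmwareapi's implementation):

```python
# Hedged sketch (not nova.virt.vmwareapi's code) of the task pattern behind
# the "Waiting for the task ... to complete" / "progress is 0%" lines: start
# a vCenter task via oslo.vmware and block until it finishes or fails.
from oslo_vmware import api

# Placeholder endpoint and credentials; the real values come from nova.conf.
# Constructing the session logs in to vCenter.
session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)

def power_off(vm_ref):
    # invoke_api() issues PowerOffVM_Task against the VM's managed object
    # reference; wait_for_task() polls it (the _poll_task DEBUG lines above)
    # and raises if the task ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    return session.wait_for_task(task)
```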
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.965754] env[63371]: DEBUG nova.network.neutron [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1782.140931] env[63371]: DEBUG nova.network.neutron [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Updating instance_info_cache with network_info: [{"id": "d94a7cdb-218d-45c4-98f7-d395d584d9c7", "address": "fa:16:3e:76:87:ec", "network": {"id": "d8737f46-512f-49f8-8fb0-d711df81c735", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1534365140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e726c7c7ed444e9b106c8b9c1c86835", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c883fb98-d172-4510-8cf4-07aafdf771af", "external-id": "nsx-vlan-transportzone-570", "segmentation_id": 570, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd94a7cdb-21", "ovs_interfaceid": "d94a7cdb-218d-45c4-98f7-d395d584d9c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.226031] env[63371]: DEBUG nova.scheduler.client.report [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1782.238597] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b6d9d4-976e-c7d4-ad79-96780b52eb91, 'name': SearchDatastore_Task, 'duration_secs': 0.009473} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.238862] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.239138] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7/3b2ed231-9f9c-4d28-9c81-034c2d17c9a7.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1782.239987] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cab3bda4-22a3-48a2-823b-b2159fa8937e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.247578] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for the task: (returnval){ [ 1782.247578] env[63371]: value = "task-1774755" [ 1782.247578] env[63371]: _type = "Task" [ 1782.247578] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.256235] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774755, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.408522] env[63371]: DEBUG oslo_vmware.api [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774754, 'name': PowerOffVM_Task, 'duration_secs': 0.199478} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.408810] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1782.409032] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1782.409408] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-497f29b5-3e23-4930-9aa7-3d018be766c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.528503] env[63371]: DEBUG nova.network.neutron [req-9149032d-8656-4578-927c-5f6808c7090f req-5d96b72b-34e2-4300-90df-29ff7f262efa service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Updated VIF entry in instance network info cache for port febc3a69-64cf-48c1-8399-147f35d89c61. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1782.528912] env[63371]: DEBUG nova.network.neutron [req-9149032d-8656-4578-927c-5f6808c7090f req-5d96b72b-34e2-4300-90df-29ff7f262efa service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Updating instance_info_cache with network_info: [{"id": "febc3a69-64cf-48c1-8399-147f35d89c61", "address": "fa:16:3e:49:d0:b8", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfebc3a69-64", "ovs_interfaceid": "febc3a69-64cf-48c1-8399-147f35d89c61", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.548383] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1782.548624] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 
tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1782.548825] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleting the datastore file [datastore1] d00602b9-16bf-4c11-bc47-6076dddbf159 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1782.549155] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-16d5ae1b-54e2-4e88-a2ab-351c5dea5987 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.558907] env[63371]: DEBUG oslo_vmware.api [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1782.558907] env[63371]: value = "task-1774757" [ 1782.558907] env[63371]: _type = "Task" [ 1782.558907] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.573347] env[63371]: DEBUG oslo_vmware.api [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774757, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.644313] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Releasing lock "refresh_cache-6c2edb87-7a36-4814-ac4a-199cdca1ef68" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.644741] env[63371]: DEBUG nova.compute.manager [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Instance network_info: |[{"id": "d94a7cdb-218d-45c4-98f7-d395d584d9c7", "address": "fa:16:3e:76:87:ec", "network": {"id": "d8737f46-512f-49f8-8fb0-d711df81c735", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1534365140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e726c7c7ed444e9b106c8b9c1c86835", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c883fb98-d172-4510-8cf4-07aafdf771af", "external-id": "nsx-vlan-transportzone-570", "segmentation_id": 570, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd94a7cdb-21", "ovs_interfaceid": "d94a7cdb-218d-45c4-98f7-d395d584d9c7", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1782.645286] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:87:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c883fb98-d172-4510-8cf4-07aafdf771af', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd94a7cdb-218d-45c4-98f7-d395d584d9c7', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1782.654428] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Creating folder: Project (1e726c7c7ed444e9b106c8b9c1c86835). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1782.654798] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8ca7efe-59ca-41ac-a2ae-e12eaa00535b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.667311] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Created folder: Project (1e726c7c7ed444e9b106c8b9c1c86835) in parent group-v368199. [ 1782.667522] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Creating folder: Instances. Parent ref: group-v368471. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1782.667786] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a82170e-afb3-4bdc-a962-58a3e8428623 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.679098] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Created folder: Instances in parent group-v368471. [ 1782.679098] env[63371]: DEBUG oslo.service.loopingcall [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1782.679098] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1782.679098] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc74e1db-2ecb-445e-83a0-bb55347d9157 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.699852] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1782.699852] env[63371]: value = "task-1774760" [ 1782.699852] env[63371]: _type = "Task" [ 1782.699852] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.709783] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774760, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.759750] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774755, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.812032] env[63371]: DEBUG nova.compute.manager [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1782.839557] env[63371]: DEBUG nova.virt.hardware [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1782.839843] env[63371]: DEBUG nova.virt.hardware [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1782.840028] env[63371]: DEBUG nova.virt.hardware [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1782.840230] env[63371]: DEBUG nova.virt.hardware [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1782.840402] env[63371]: DEBUG nova.virt.hardware [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1782.840566] env[63371]: DEBUG nova.virt.hardware [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1782.840799] env[63371]: DEBUG nova.virt.hardware [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1782.841022] env[63371]: DEBUG nova.virt.hardware [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1782.841216] env[63371]: DEBUG nova.virt.hardware [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1782.841399] env[63371]: DEBUG nova.virt.hardware [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1782.841596] env[63371]: DEBUG nova.virt.hardware [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1782.842790] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ad7ce5-ce9a-467f-b531-b15bf62bc129 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.851482] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a2c4f9a-059a-4298-9eed-676e9ab480b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.887968] env[63371]: DEBUG nova.compute.manager [req-32f10f31-809c-4306-aa3c-e45d0eed20a9 req-44f91b30-03ac-433c-a63b-504be412ec0c service nova] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Received event network-changed-d94a7cdb-218d-45c4-98f7-d395d584d9c7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1782.888285] env[63371]: DEBUG nova.compute.manager [req-32f10f31-809c-4306-aa3c-e45d0eed20a9 req-44f91b30-03ac-433c-a63b-504be412ec0c service nova] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Refreshing instance network info cache due to event network-changed-d94a7cdb-218d-45c4-98f7-d395d584d9c7. 
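The nova.virt.hardware lines above walk from the flavor and image limits (sockets=65536, cores=65536, threads=65536) down to a single VirtCPUTopology(cores=1,sockets=1,threads=1) for the 1-vCPU flavor. A simplified, self-contained sketch of that enumeration step; it ignores NUMA, preferred topologies and sorting, so it is an illustration of the idea rather than Nova's algorithm:

```python
# Simplified sketch of the topology search the hardware.py DEBUG lines trace:
# enumerate every (sockets, cores, threads) split of the flavor's vCPU count
# that fits within the limits.
def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    topos = []
    for s in range(1, min(vcpus, max_sockets) + 1):
        for c in range(1, min(vcpus, max_cores) + 1):
            for t in range(1, min(vcpus, max_threads) + 1):
                if s * c * t == vcpus:
                    topos.append((s, c, t))
    return topos

# For the m1.nano flavor above (1 vCPU, limits 65536:65536:65536) the only
# result is [(1, 1, 1)], matching "Got 1 possible topologies".
print(possible_topologies(1, 65536, 65536, 65536))
```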
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1782.888591] env[63371]: DEBUG oslo_concurrency.lockutils [req-32f10f31-809c-4306-aa3c-e45d0eed20a9 req-44f91b30-03ac-433c-a63b-504be412ec0c service nova] Acquiring lock "refresh_cache-6c2edb87-7a36-4814-ac4a-199cdca1ef68" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1782.888684] env[63371]: DEBUG oslo_concurrency.lockutils [req-32f10f31-809c-4306-aa3c-e45d0eed20a9 req-44f91b30-03ac-433c-a63b-504be412ec0c service nova] Acquired lock "refresh_cache-6c2edb87-7a36-4814-ac4a-199cdca1ef68" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1782.888874] env[63371]: DEBUG nova.network.neutron [req-32f10f31-809c-4306-aa3c-e45d0eed20a9 req-44f91b30-03ac-433c-a63b-504be412ec0c service nova] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Refreshing network info cache for port d94a7cdb-218d-45c4-98f7-d395d584d9c7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1783.031991] env[63371]: DEBUG oslo_concurrency.lockutils [req-9149032d-8656-4578-927c-5f6808c7090f req-5d96b72b-34e2-4300-90df-29ff7f262efa service nova] Releasing lock "refresh_cache-da4839fa-8597-411c-b30c-0ac9226fec1f" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.068908] env[63371]: DEBUG oslo_vmware.api [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774757, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.356305} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.069205] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1783.069395] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1783.069550] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1783.069719] env[63371]: INFO nova.compute.manager [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1783.069957] env[63371]: DEBUG oslo.service.loopingcall [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1783.070157] env[63371]: DEBUG nova.compute.manager [-] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1783.070244] env[63371]: DEBUG nova.network.neutron [-] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1783.117084] env[63371]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 4590c30b-effd-423f-b0b2-c208bbdfffd7 could not be found.", "detail": ""}} {{(pid=63371) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1783.117406] env[63371]: DEBUG nova.network.neutron [-] Unable to show port 4590c30b-effd-423f-b0b2-c208bbdfffd7 as it no longer exists. {{(pid=63371) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1783.211822] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774760, 'name': CreateVM_Task, 'duration_secs': 0.426199} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.212239] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1783.213517] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.213978] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.216107] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1783.216107] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b337fa8-2d53-4c0f-a49c-cfda9d0f7d45 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.220606] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Waiting for the task: (returnval){ [ 1783.220606] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5211cbfe-7dd8-e6c4-c146-34649382e46b" [ 1783.220606] env[63371]: _type = "Task" [ 1783.220606] 
env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.232087] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5211cbfe-7dd8-e6c4-c146-34649382e46b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.241582] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.450s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.245133] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.805s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.246631] env[63371]: INFO nova.compute.claims [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1783.259083] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774755, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527135} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.259404] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7/3b2ed231-9f9c-4d28-9c81-034c2d17c9a7.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1783.259652] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1783.259957] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-83f52163-e3fd-4c1e-902c-ef3f34328152 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.269031] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for the task: (returnval){ [ 1783.269031] env[63371]: value = "task-1774761" [ 1783.269031] env[63371]: _type = "Task" [ 1783.269031] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.277045] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774761, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.295717] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "9985dbcd-4498-4629-aae5-5e1933307c50" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.296493] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "9985dbcd-4498-4629-aae5-5e1933307c50" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.296794] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "9985dbcd-4498-4629-aae5-5e1933307c50-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1783.297145] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "9985dbcd-4498-4629-aae5-5e1933307c50-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1783.297431] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "9985dbcd-4498-4629-aae5-5e1933307c50-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1783.300618] env[63371]: INFO nova.compute.manager [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Terminating instance [ 1783.303616] env[63371]: DEBUG nova.compute.manager [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1783.303936] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1783.305435] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1458a5cf-84c9-42bb-b790-c5f2895f01bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.315754] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1783.315902] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a92592f4-e63e-48d6-ab75-f2fbe87325be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.323399] env[63371]: DEBUG oslo_vmware.api [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1783.323399] env[63371]: value = "task-1774762" [ 1783.323399] env[63371]: _type = "Task" [ 1783.323399] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.332766] env[63371]: DEBUG oslo_vmware.api [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774762, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.341458] env[63371]: DEBUG nova.network.neutron [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Successfully updated port: e4ee0c90-4a70-4f4e-b976-34412c13da2f {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1783.647610] env[63371]: DEBUG nova.network.neutron [req-32f10f31-809c-4306-aa3c-e45d0eed20a9 req-44f91b30-03ac-433c-a63b-504be412ec0c service nova] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Updated VIF entry in instance network info cache for port d94a7cdb-218d-45c4-98f7-d395d584d9c7. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1783.647610] env[63371]: DEBUG nova.network.neutron [req-32f10f31-809c-4306-aa3c-e45d0eed20a9 req-44f91b30-03ac-433c-a63b-504be412ec0c service nova] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Updating instance_info_cache with network_info: [{"id": "d94a7cdb-218d-45c4-98f7-d395d584d9c7", "address": "fa:16:3e:76:87:ec", "network": {"id": "d8737f46-512f-49f8-8fb0-d711df81c735", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1534365140-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1e726c7c7ed444e9b106c8b9c1c86835", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c883fb98-d172-4510-8cf4-07aafdf771af", "external-id": "nsx-vlan-transportzone-570", "segmentation_id": 570, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd94a7cdb-21", "ovs_interfaceid": "d94a7cdb-218d-45c4-98f7-d395d584d9c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1783.731389] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5211cbfe-7dd8-e6c4-c146-34649382e46b, 'name': SearchDatastore_Task, 'duration_secs': 0.045731} completed successfully. 
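For reference, the instance_info_cache entries dumped above are plain nested structures (VIF -> network -> subnets -> ips). A small sketch of reading the fixed IP and tap device name out of such a structure; the field names and values are taken from the log entry for port d94a7cdb-218d-45c4-98f7-d395d584d9c7, but the trimmed-down dict itself is illustrative rather than the full cached object:

```python
# Hedged sketch: extracting the fixed IPs and tap device from a network_info
# entry shaped like the one logged above. Only the fields needed for the
# example are kept; the real cache carries many more keys.
network_info = [{
    "id": "d94a7cdb-218d-45c4-98f7-d395d584d9c7",
    "devname": "tapd94a7cdb-21",
    "network": {"subnets": [{"ips": [{"address": "192.168.128.9",
                                      "type": "fixed"}]}]},
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["devname"], fixed_ips)   # -> tapd94a7cdb-21 ['192.168.128.9']
```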
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.731685] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.731916] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1783.732160] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.732306] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.732509] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1783.732820] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0cf834b-5371-4d73-8088-1cadcf45e4d5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.741143] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1783.741324] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1783.742046] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8c7c030-e1dd-4c1b-acaa-64cb48f70a1c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.748625] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Waiting for the task: (returnval){ [ 1783.748625] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52556a4f-d123-fa03-ec7c-ef31228a1296" [ 1783.748625] env[63371]: _type = "Task" [ 1783.748625] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.768024] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52556a4f-d123-fa03-ec7c-ef31228a1296, 'name': SearchDatastore_Task, 'duration_secs': 0.010556} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.768024] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b3dcdf8-3322-408c-8f93-e0c07e56df0a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.780480] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774761, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.184076} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.780885] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Waiting for the task: (returnval){ [ 1783.780885] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ac3f1f-8daf-8447-0667-3318a1958d3e" [ 1783.780885] env[63371]: _type = "Task" [ 1783.780885] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.781050] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1783.782091] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8fdaa9d-1a2b-4941-ad0d-834025b1470f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.794159] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ac3f1f-8daf-8447-0667-3318a1958d3e, 'name': SearchDatastore_Task, 'duration_secs': 0.011343} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.803699] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.804007] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 6c2edb87-7a36-4814-ac4a-199cdca1ef68/6c2edb87-7a36-4814-ac4a-199cdca1ef68.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1783.813152] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7/3b2ed231-9f9c-4d28-9c81-034c2d17c9a7.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1783.814781] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d6e5cfb2-0929-47f5-baa1-5f4f7241cda1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.817295] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f069f3f-ab45-4f3a-8611-5608bc74670f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.833405] env[63371]: DEBUG nova.compute.manager [req-28bfcf4c-3450-436b-a459-53e103c593e1 req-6622e856-5260-472c-8d6a-7ec05c4bb0e4 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Received 
event network-vif-deleted-bc8b891d-040a-4a55-a281-311c08ae828d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1783.833599] env[63371]: INFO nova.compute.manager [req-28bfcf4c-3450-436b-a459-53e103c593e1 req-6622e856-5260-472c-8d6a-7ec05c4bb0e4 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Neutron deleted interface bc8b891d-040a-4a55-a281-311c08ae828d; detaching it from the instance and deleting it from the info cache [ 1783.833842] env[63371]: DEBUG nova.network.neutron [req-28bfcf4c-3450-436b-a459-53e103c593e1 req-6622e856-5260-472c-8d6a-7ec05c4bb0e4 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updating instance_info_cache with network_info: [{"id": "4590c30b-effd-423f-b0b2-c208bbdfffd7", "address": "fa:16:3e:dd:c9:8c", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4590c30b-ef", "ovs_interfaceid": "4590c30b-effd-423f-b0b2-c208bbdfffd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1783.840663] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Waiting for the task: (returnval){ [ 1783.840663] env[63371]: value = "task-1774763" [ 1783.840663] env[63371]: _type = "Task" [ 1783.840663] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.848743] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "refresh_cache-0518c5a8-8cc1-4829-a0cf-5f5904f6df86" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.848743] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "refresh_cache-0518c5a8-8cc1-4829-a0cf-5f5904f6df86" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.848819] env[63371]: DEBUG nova.network.neutron [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1783.850201] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for the task: (returnval){ [ 1783.850201] env[63371]: value = "task-1774764" [ 1783.850201] env[63371]: _type = "Task" [ 1783.850201] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.850450] env[63371]: DEBUG oslo_vmware.api [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774762, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.861835] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774763, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.869384] env[63371]: INFO nova.scheduler.client.report [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted allocation for migration 1bad7cd8-a319-4561-8560-524cc376e5e2 [ 1783.871762] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774764, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.148960] env[63371]: DEBUG oslo_concurrency.lockutils [req-32f10f31-809c-4306-aa3c-e45d0eed20a9 req-44f91b30-03ac-433c-a63b-504be412ec0c service nova] Releasing lock "refresh_cache-6c2edb87-7a36-4814-ac4a-199cdca1ef68" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.151710] env[63371]: DEBUG nova.network.neutron [-] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.340150] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-427318b0-a868-44ed-a29e-440a7fc7ddd7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.352473] env[63371]: DEBUG oslo_vmware.api [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774762, 'name': PowerOffVM_Task, 'duration_secs': 0.553802} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.355908] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1784.356111] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1784.362287] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-830db915-1fee-4bdb-9d7c-8e227668c336 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.366683] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9ed5f4-f430-4193-8fc7-0f1d60dffd0d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.378680] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774763, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505556} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.380568] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.382272] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a4ea9e26-4b6c-408b-8ae2-a80d161e7bb1 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 21.753s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.383328] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 6c2edb87-7a36-4814-ac4a-199cdca1ef68/6c2edb87-7a36-4814-ac4a-199cdca1ef68.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1784.383567] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1784.389813] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.007s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.389813] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.389813] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.389813] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 
tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.390049] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3e0de153-7143-43f5-9460-81300b0d9a46 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.393202] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774764, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.393319] env[63371]: DEBUG nova.network.neutron [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1784.395234] env[63371]: INFO nova.compute.manager [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Terminating instance [ 1784.399492] env[63371]: DEBUG nova.compute.manager [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1784.399750] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1784.401128] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf5005c-9169-40ef-a683-7b3d0020bdf8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.418063] env[63371]: DEBUG nova.compute.manager [req-28bfcf4c-3450-436b-a459-53e103c593e1 req-6622e856-5260-472c-8d6a-7ec05c4bb0e4 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Detach interface failed, port_id=bc8b891d-040a-4a55-a281-311c08ae828d, reason: Instance d00602b9-16bf-4c11-bc47-6076dddbf159 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1784.425362] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Waiting for the task: (returnval){ [ 1784.425362] env[63371]: value = "task-1774766" [ 1784.425362] env[63371]: _type = "Task" [ 1784.425362] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.433385] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1784.434549] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ab00730-2146-4448-a2cc-66d3ed56df1e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.438588] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774766, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.442231] env[63371]: DEBUG oslo_vmware.api [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1784.442231] env[63371]: value = "task-1774767" [ 1784.442231] env[63371]: _type = "Task" [ 1784.442231] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.451040] env[63371]: DEBUG oslo_vmware.api [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774767, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.504717] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1784.504975] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1784.505193] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Deleting the datastore file [datastore1] 9985dbcd-4498-4629-aae5-5e1933307c50 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1784.505462] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a8573f0-976b-42c0-a4ff-3ef45b26ae65 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.513108] env[63371]: DEBUG oslo_vmware.api [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for the task: (returnval){ [ 1784.513108] env[63371]: value = "task-1774768" [ 1784.513108] env[63371]: _type = "Task" [ 1784.513108] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.521570] env[63371]: DEBUG oslo_vmware.api [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774768, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.575804] env[63371]: DEBUG nova.network.neutron [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Updating instance_info_cache with network_info: [{"id": "e4ee0c90-4a70-4f4e-b976-34412c13da2f", "address": "fa:16:3e:2d:a3:3a", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4ee0c90-4a", "ovs_interfaceid": "e4ee0c90-4a70-4f4e-b976-34412c13da2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.618645] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25be9c38-a7e5-4a49-b2af-a52389740638 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.627508] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1265ae68-c771-448e-bf96-329ff5d690e2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.658767] env[63371]: INFO nova.compute.manager [-] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Took 1.59 seconds to deallocate network for instance. 
[ 1784.661239] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dcaa65-fd9b-4b77-b6f9-37651e75d7b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.673046] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b00e57-1ba2-48cd-a617-ce3574ecfbb6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.687432] env[63371]: DEBUG nova.compute.provider_tree [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1784.839470] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "d042bb16-c84d-42bb-af3f-38c08995fd91" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.839725] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "d042bb16-c84d-42bb-af3f-38c08995fd91" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.866065] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774764, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.913675] env[63371]: DEBUG nova.compute.manager [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Received event network-vif-plugged-e4ee0c90-4a70-4f4e-b976-34412c13da2f {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1784.913892] env[63371]: DEBUG oslo_concurrency.lockutils [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] Acquiring lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1784.914129] env[63371]: DEBUG oslo_concurrency.lockutils [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] Lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1784.914300] env[63371]: DEBUG oslo_concurrency.lockutils [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] Lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.914465] env[63371]: DEBUG nova.compute.manager [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] No waiting events found dispatching network-vif-plugged-e4ee0c90-4a70-4f4e-b976-34412c13da2f {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1784.914626] env[63371]: WARNING nova.compute.manager [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Received unexpected event network-vif-plugged-e4ee0c90-4a70-4f4e-b976-34412c13da2f for instance with vm_state building and task_state spawning. [ 1784.914780] env[63371]: DEBUG nova.compute.manager [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Received event network-changed-e4ee0c90-4a70-4f4e-b976-34412c13da2f {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1784.914930] env[63371]: DEBUG nova.compute.manager [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Refreshing instance network info cache due to event network-changed-e4ee0c90-4a70-4f4e-b976-34412c13da2f. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1784.915134] env[63371]: DEBUG oslo_concurrency.lockutils [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] Acquiring lock "refresh_cache-0518c5a8-8cc1-4829-a0cf-5f5904f6df86" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.935018] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774766, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074659} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.935273] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1784.936033] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ff79fb0-4075-4bc1-82ce-db1bc037beaf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.958597] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] 6c2edb87-7a36-4814-ac4a-199cdca1ef68/6c2edb87-7a36-4814-ac4a-199cdca1ef68.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1784.961753] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd4890cd-ec2b-4454-93f3-d456ae2670c8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.980458] env[63371]: DEBUG oslo_vmware.api [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774767, 'name': PowerOffVM_Task, 'duration_secs': 0.19154} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.981709] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1784.981890] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1784.982198] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Waiting for the task: (returnval){ [ 1784.982198] env[63371]: value = "task-1774769" [ 1784.982198] env[63371]: _type = "Task" [ 1784.982198] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.982668] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02647995-81f7-45fd-8ce4-340f393c3f11 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.991654] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774769, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.021615] env[63371]: DEBUG oslo_vmware.api [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Task: {'id': task-1774768, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125802} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.021844] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1785.022079] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1785.022266] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1785.022437] env[63371]: INFO nova.compute.manager [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Took 1.72 seconds to destroy the instance on the hypervisor. [ 1785.022668] env[63371]: DEBUG oslo.service.loopingcall [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1785.022907] env[63371]: DEBUG nova.compute.manager [-] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1785.023029] env[63371]: DEBUG nova.network.neutron [-] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1785.068074] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1785.068315] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1785.068498] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleting the datastore file [datastore1] 3a6c12a7-732f-4a73-a8c5-6810b554cc03 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1785.070688] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29f02470-e5e3-46f8-b3c1-d854396f95e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.076850] env[63371]: DEBUG oslo_vmware.api [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1785.076850] env[63371]: value = "task-1774771" [ 1785.076850] env[63371]: _type = "Task" [ 1785.076850] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.080055] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "refresh_cache-0518c5a8-8cc1-4829-a0cf-5f5904f6df86" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.080360] env[63371]: DEBUG nova.compute.manager [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Instance network_info: |[{"id": "e4ee0c90-4a70-4f4e-b976-34412c13da2f", "address": "fa:16:3e:2d:a3:3a", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4ee0c90-4a", "ovs_interfaceid": "e4ee0c90-4a70-4f4e-b976-34412c13da2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1785.080642] env[63371]: DEBUG oslo_concurrency.lockutils [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] Acquired lock "refresh_cache-0518c5a8-8cc1-4829-a0cf-5f5904f6df86" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1785.080823] env[63371]: DEBUG nova.network.neutron [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Refreshing network info cache for port e4ee0c90-4a70-4f4e-b976-34412c13da2f {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1785.083627] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2d:a3:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e4ee0c90-4a70-4f4e-b976-34412c13da2f', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1785.091388] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 
tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Creating folder: Project (5942787fa9584e8fbf5ddd459907ce5d). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1785.092833] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d6385b6-74ac-47d4-87ef-59e8c0dada85 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.098465] env[63371]: DEBUG oslo_vmware.api [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774771, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.107089] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Created folder: Project (5942787fa9584e8fbf5ddd459907ce5d) in parent group-v368199. [ 1785.107280] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Creating folder: Instances. Parent ref: group-v368474. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1785.107549] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d74f715-cae8-4f9d-8466-8ab86f3d61ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.118021] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Created folder: Instances in parent group-v368474. [ 1785.118316] env[63371]: DEBUG oslo.service.loopingcall [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1785.118581] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1785.118886] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd508963-89ca-4c9f-9bf7-773ffe46a782 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.150055] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1785.150055] env[63371]: value = "task-1774774" [ 1785.150055] env[63371]: _type = "Task" [ 1785.150055] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.159975] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774774, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.169174] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.219912] env[63371]: ERROR nova.scheduler.client.report [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [req-f104e162-12ee-4e6b-9b4f-a956c639a96b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f104e162-12ee-4e6b-9b4f-a956c639a96b"}]} [ 1785.240122] env[63371]: DEBUG nova.scheduler.client.report [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1785.254607] env[63371]: DEBUG nova.scheduler.client.report [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1785.254961] env[63371]: DEBUG nova.compute.provider_tree [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1785.267370] env[63371]: DEBUG nova.scheduler.client.report [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae 
tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1785.287108] env[63371]: DEBUG nova.scheduler.client.report [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1785.341491] env[63371]: DEBUG nova.compute.manager [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1785.367741] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774764, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.494168] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774769, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.589112] env[63371]: DEBUG oslo_vmware.api [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774771, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128866} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.592843] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1785.592843] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1785.592843] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1785.592843] env[63371]: INFO nova.compute.manager [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1785.592843] env[63371]: DEBUG oslo.service.loopingcall [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1785.593245] env[63371]: DEBUG nova.compute.manager [-] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1785.593245] env[63371]: DEBUG nova.network.neutron [-] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1785.657181] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb4639f-e67e-4968-948f-dd43ed56b8af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.665787] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774774, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.667626] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8e425c-a01f-429c-bcd7-7af9f72f136e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.701682] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e9b853-78d9-4ddf-91b9-59ee04b6019a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.710399] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750586b3-01ef-460e-bf77-1f03595fd678 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.729561] env[63371]: DEBUG nova.compute.provider_tree [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1785.844427] env[63371]: DEBUG nova.compute.manager [req-6f5a85e9-6a72-4677-88af-e5db22bd97b4 req-5d1f3cf2-2f89-4d56-a78f-95fc0b1fbed1 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Received event network-vif-deleted-d3f41a80-52de-46a5-ac15-9a26e6710908 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1785.844427] env[63371]: INFO nova.compute.manager [req-6f5a85e9-6a72-4677-88af-e5db22bd97b4 req-5d1f3cf2-2f89-4d56-a78f-95fc0b1fbed1 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Neutron deleted interface d3f41a80-52de-46a5-ac15-9a26e6710908; detaching it from the instance and deleting it from the info cache [ 1785.844427] env[63371]: DEBUG nova.network.neutron [req-6f5a85e9-6a72-4677-88af-e5db22bd97b4 req-5d1f3cf2-2f89-4d56-a78f-95fc0b1fbed1 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.869285] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.875481] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774764, 'name': ReconfigVM_Task, 'duration_secs': 1.589218} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.875950] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7/3b2ed231-9f9c-4d28-9c81-034c2d17c9a7.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1785.876812] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-61b176ab-533b-45d6-864c-67d9b7c4229b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.884363] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for the task: (returnval){ [ 1785.884363] env[63371]: value = "task-1774775" [ 1785.884363] env[63371]: _type = "Task" [ 1785.884363] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.896861] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774775, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.917816] env[63371]: DEBUG nova.network.neutron [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Updated VIF entry in instance network info cache for port e4ee0c90-4a70-4f4e-b976-34412c13da2f. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1785.918204] env[63371]: DEBUG nova.network.neutron [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Updating instance_info_cache with network_info: [{"id": "e4ee0c90-4a70-4f4e-b976-34412c13da2f", "address": "fa:16:3e:2d:a3:3a", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4ee0c90-4a", "ovs_interfaceid": "e4ee0c90-4a70-4f4e-b976-34412c13da2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.003186] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774769, 'name': ReconfigVM_Task, 'duration_secs': 0.698903} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.003599] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Reconfigured VM instance instance-00000064 to attach disk [datastore1] 6c2edb87-7a36-4814-ac4a-199cdca1ef68/6c2edb87-7a36-4814-ac4a-199cdca1ef68.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1786.006488] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5bb1eca4-a3f7-4b78-b72c-db4ca83997af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.016989] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Waiting for the task: (returnval){ [ 1786.016989] env[63371]: value = "task-1774776" [ 1786.016989] env[63371]: _type = "Task" [ 1786.016989] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.032049] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774776, 'name': Rename_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.071207] env[63371]: DEBUG nova.network.neutron [-] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.163257] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774774, 'name': CreateVM_Task, 'duration_secs': 0.701134} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.163467] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1786.164099] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.164275] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.164587] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1786.164841] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0932f220-f23d-4cfc-a4c8-35c8f2a717cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.170020] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1786.170020] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ef6f0e-b1fb-fd9c-1182-0ccf7444ce03" [ 1786.170020] env[63371]: _type = "Task" [ 1786.170020] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.177863] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ef6f0e-b1fb-fd9c-1182-0ccf7444ce03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.265719] env[63371]: DEBUG nova.scheduler.client.report [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 148 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1786.265984] env[63371]: DEBUG nova.compute.provider_tree [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 148 to 149 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1786.266185] env[63371]: DEBUG nova.compute.provider_tree [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1786.347855] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e35eb23e-a258-45dd-913b-5a25a5fa532a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.358078] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4f2b1e-23e0-4a94-97b4-a9bfd8dc88fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.368922] env[63371]: DEBUG nova.network.neutron [-] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1786.389218] env[63371]: DEBUG nova.compute.manager [req-6f5a85e9-6a72-4677-88af-e5db22bd97b4 req-5d1f3cf2-2f89-4d56-a78f-95fc0b1fbed1 service nova] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Detach interface failed, 
port_id=d3f41a80-52de-46a5-ac15-9a26e6710908, reason: Instance 9985dbcd-4498-4629-aae5-5e1933307c50 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1786.398781] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774775, 'name': Rename_Task, 'duration_secs': 0.134837} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.399059] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1786.399347] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82e2abe0-a1c5-4239-91b1-29cb59c77815 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.409121] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for the task: (returnval){ [ 1786.409121] env[63371]: value = "task-1774777" [ 1786.409121] env[63371]: _type = "Task" [ 1786.409121] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.417277] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774777, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.420829] env[63371]: DEBUG oslo_concurrency.lockutils [req-383f3160-aa3e-4d87-9b20-f6ed44f6588b req-da4e2900-5821-4155-88ea-5cbd3351f793 service nova] Releasing lock "refresh_cache-0518c5a8-8cc1-4829-a0cf-5f5904f6df86" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.528214] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774776, 'name': Rename_Task, 'duration_secs': 0.132181} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.528549] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1786.528747] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-009e0c45-fe29-40c7-be7e-0794a36a644a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.534966] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Waiting for the task: (returnval){ [ 1786.534966] env[63371]: value = "task-1774778" [ 1786.534966] env[63371]: _type = "Task" [ 1786.534966] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.544745] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774778, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.574631] env[63371]: INFO nova.compute.manager [-] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Took 1.55 seconds to deallocate network for instance. [ 1786.681304] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ef6f0e-b1fb-fd9c-1182-0ccf7444ce03, 'name': SearchDatastore_Task, 'duration_secs': 0.008439} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.681620] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1786.681866] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1786.682161] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1786.682314] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1786.682521] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1786.682815] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ea0cbb5-ab0c-40fe-9995-3d5a1fe06139 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.692484] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1786.693364] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1786.693611] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7406501f-d505-451c-adcd-c86680991bfa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.699860] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1786.699860] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ba576b-1714-dae3-4346-f83b4e00ab9b" [ 1786.699860] env[63371]: _type = "Task" [ 1786.699860] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.708657] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ba576b-1714-dae3-4346-f83b4e00ab9b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.771384] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.526s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1786.771923] env[63371]: DEBUG nova.compute.manager [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1786.774673] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.586s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1786.776217] env[63371]: INFO nova.compute.claims [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1786.872887] env[63371]: INFO nova.compute.manager [-] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Took 1.28 seconds to deallocate network for instance. [ 1786.923893] env[63371]: DEBUG oslo_vmware.api [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774777, 'name': PowerOnVM_Task, 'duration_secs': 0.508392} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.924195] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1786.924402] env[63371]: INFO nova.compute.manager [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Took 10.01 seconds to spawn the instance on the hypervisor. [ 1786.924583] env[63371]: DEBUG nova.compute.manager [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1786.925423] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ec0a2c-de8c-48de-bc59-bc25e2c212d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.967354] env[63371]: DEBUG nova.compute.manager [req-9f2376e1-44e7-45dd-8ee6-4eee0d2d6b67 req-80fd98ab-afc3-4c9f-8a1f-d36f1391e15b service nova] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Received event network-vif-deleted-e144cd6b-c3f5-496e-99c6-19e9ab58c042 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1787.045801] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774778, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.083254] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.210611] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ba576b-1714-dae3-4346-f83b4e00ab9b, 'name': SearchDatastore_Task, 'duration_secs': 0.011519} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.211408] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77621179-2ce0-4daf-b9f8-ad3ebf7583cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.216689] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1787.216689] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b37322-73a9-abfb-8c8b-5cf6e4c16490" [ 1787.216689] env[63371]: _type = "Task" [ 1787.216689] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.224388] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b37322-73a9-abfb-8c8b-5cf6e4c16490, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.283045] env[63371]: DEBUG nova.compute.utils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1787.284434] env[63371]: DEBUG nova.compute.manager [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1787.284687] env[63371]: DEBUG nova.network.neutron [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1787.333869] env[63371]: DEBUG nova.policy [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '739c32320b904c76ace3302c40b1d627', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5942787fa9584e8fbf5ddd459907ce5d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1787.381221] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1787.449023] env[63371]: INFO nova.compute.manager [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Took 26.08 seconds to build instance. [ 1787.549072] env[63371]: DEBUG oslo_vmware.api [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774778, 'name': PowerOnVM_Task, 'duration_secs': 0.928609} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.549072] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1787.549072] env[63371]: INFO nova.compute.manager [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Took 7.20 seconds to spawn the instance on the hypervisor. 
[ 1787.549072] env[63371]: DEBUG nova.compute.manager [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1787.549917] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004308a1-9199-4857-91fb-c873bd8aae1d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.699472] env[63371]: DEBUG nova.network.neutron [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Successfully created port: f560031e-f701-4309-aead-34a87be57b22 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1787.736065] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b37322-73a9-abfb-8c8b-5cf6e4c16490, 'name': SearchDatastore_Task, 'duration_secs': 0.019328} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.736355] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1787.737040] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0518c5a8-8cc1-4829-a0cf-5f5904f6df86/0518c5a8-8cc1-4829-a0cf-5f5904f6df86.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1787.737040] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7406c69d-e8b0-4275-a400-199c6feaf5ab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.747913] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1787.747913] env[63371]: value = "task-1774779" [ 1787.747913] env[63371]: _type = "Task" [ 1787.747913] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.756518] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774779, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.789249] env[63371]: DEBUG nova.compute.manager [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1787.949501] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b584a676-461e-4321-a541-bbbf2aee88db tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 27.595s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.070157] env[63371]: INFO nova.compute.manager [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Took 24.69 seconds to build instance. [ 1788.102145] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7a248c-94f2-441f-abbd-d9bef7ae8ca5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.110546] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53c311d7-64c2-488e-ae5b-f5788f8d0c4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.142410] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265378c5-2d31-4ea2-b650-8b77e69681e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.150568] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e12976-9392-404e-830e-15d3576729f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.166653] env[63371]: DEBUG nova.compute.provider_tree [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1788.258659] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774779, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.572188] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c43933fb-dad7-4d84-9b25-bbead6f3110b tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Lock "6c2edb87-7a36-4814-ac4a-199cdca1ef68" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 26.198s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.672023] env[63371]: DEBUG nova.scheduler.client.report [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1788.761821] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774779, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.816021} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.761821] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0518c5a8-8cc1-4829-a0cf-5f5904f6df86/0518c5a8-8cc1-4829-a0cf-5f5904f6df86.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1788.761821] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1788.762270] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1d1ea88a-e388-4b69-8d5b-9c650f8f6da4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.770679] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1788.770679] env[63371]: value = "task-1774780" [ 1788.770679] env[63371]: _type = "Task" [ 1788.770679] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.783137] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774780, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.798685] env[63371]: DEBUG nova.compute.manager [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1788.828397] env[63371]: DEBUG nova.virt.hardware [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1788.828852] env[63371]: DEBUG nova.virt.hardware [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1788.829055] env[63371]: DEBUG nova.virt.hardware [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1788.829263] env[63371]: DEBUG nova.virt.hardware [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1788.829439] env[63371]: DEBUG nova.virt.hardware [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1788.829629] env[63371]: DEBUG nova.virt.hardware [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1788.829869] env[63371]: DEBUG nova.virt.hardware [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1788.830051] env[63371]: DEBUG nova.virt.hardware [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1788.830298] env[63371]: DEBUG nova.virt.hardware [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1788.831030] env[63371]: DEBUG nova.virt.hardware [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1788.831030] env[63371]: DEBUG nova.virt.hardware [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1788.831672] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49ee509-033b-4803-8dd4-b82e956617aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.840459] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53135474-bb46-408d-9629-48b868058bd2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.117960] env[63371]: DEBUG nova.compute.manager [req-71912de3-de96-47a6-a76b-ad366d1de511 req-31fae031-4ac6-4025-a374-a0c85fe6d0f3 service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Received event network-changed-dd28ee6f-5efa-4009-842b-c1c9af10f8ea {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1789.118201] env[63371]: DEBUG nova.compute.manager [req-71912de3-de96-47a6-a76b-ad366d1de511 req-31fae031-4ac6-4025-a374-a0c85fe6d0f3 service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Refreshing instance network info cache due to event network-changed-dd28ee6f-5efa-4009-842b-c1c9af10f8ea. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1789.118422] env[63371]: DEBUG oslo_concurrency.lockutils [req-71912de3-de96-47a6-a76b-ad366d1de511 req-31fae031-4ac6-4025-a374-a0c85fe6d0f3 service nova] Acquiring lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.118529] env[63371]: DEBUG oslo_concurrency.lockutils [req-71912de3-de96-47a6-a76b-ad366d1de511 req-31fae031-4ac6-4025-a374-a0c85fe6d0f3 service nova] Acquired lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.118688] env[63371]: DEBUG nova.network.neutron [req-71912de3-de96-47a6-a76b-ad366d1de511 req-31fae031-4ac6-4025-a374-a0c85fe6d0f3 service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Refreshing network info cache for port dd28ee6f-5efa-4009-842b-c1c9af10f8ea {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1789.175068] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.400s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.175635] env[63371]: DEBUG nova.compute.manager [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1789.178830] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.036s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.179042] env[63371]: DEBUG nova.objects.instance [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'resources' on Instance uuid b523486c-adae-4322-80be-1f3bf33ca192 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1789.232183] env[63371]: DEBUG nova.compute.manager [req-118508db-cf7f-4bf3-bf63-91cf2629fb29 req-3545db54-7adc-439c-b9c6-d1f867163caf service nova] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Received event network-vif-plugged-f560031e-f701-4309-aead-34a87be57b22 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1789.232699] env[63371]: DEBUG oslo_concurrency.lockutils [req-118508db-cf7f-4bf3-bf63-91cf2629fb29 req-3545db54-7adc-439c-b9c6-d1f867163caf service nova] Acquiring lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.232893] env[63371]: DEBUG oslo_concurrency.lockutils [req-118508db-cf7f-4bf3-bf63-91cf2629fb29 req-3545db54-7adc-439c-b9c6-d1f867163caf service nova] Lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.233147] env[63371]: DEBUG oslo_concurrency.lockutils [req-118508db-cf7f-4bf3-bf63-91cf2629fb29 req-3545db54-7adc-439c-b9c6-d1f867163caf service nova] Lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.233333] env[63371]: DEBUG nova.compute.manager [req-118508db-cf7f-4bf3-bf63-91cf2629fb29 req-3545db54-7adc-439c-b9c6-d1f867163caf service nova] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] No waiting events found dispatching network-vif-plugged-f560031e-f701-4309-aead-34a87be57b22 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1789.233502] env[63371]: WARNING nova.compute.manager [req-118508db-cf7f-4bf3-bf63-91cf2629fb29 req-3545db54-7adc-439c-b9c6-d1f867163caf service nova] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Received unexpected event network-vif-plugged-f560031e-f701-4309-aead-34a87be57b22 for instance with vm_state building and task_state spawning. [ 1789.286826] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774780, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066552} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.291018] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1789.291018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ff1ff1-3d9c-49bf-9abf-36d4760fd9f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.317930] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 0518c5a8-8cc1-4829-a0cf-5f5904f6df86/0518c5a8-8cc1-4829-a0cf-5f5904f6df86.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1789.317930] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5628727d-811d-448c-b324-4ea7e1cbb85d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.338086] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1789.338086] env[63371]: value = "task-1774781" [ 1789.338086] env[63371]: _type = "Task" [ 1789.338086] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.347368] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774781, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.362315] env[63371]: DEBUG nova.network.neutron [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Successfully updated port: f560031e-f701-4309-aead-34a87be57b22 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1789.595023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Acquiring lock "6c2edb87-7a36-4814-ac4a-199cdca1ef68" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.595023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Lock "6c2edb87-7a36-4814-ac4a-199cdca1ef68" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.595023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Acquiring lock "6c2edb87-7a36-4814-ac4a-199cdca1ef68-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1789.595023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Lock "6c2edb87-7a36-4814-ac4a-199cdca1ef68-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1789.595023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Lock "6c2edb87-7a36-4814-ac4a-199cdca1ef68-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.597592] env[63371]: INFO nova.compute.manager [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Terminating instance [ 1789.599314] env[63371]: DEBUG nova.compute.manager [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1789.599523] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1789.600379] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670cc7ad-598b-44f0-a595-8e147c514dc0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.607797] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1789.608038] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2918118-054c-40d8-93c6-7ffb4b772e3e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.613282] env[63371]: DEBUG oslo_vmware.api [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Waiting for the task: (returnval){ [ 1789.613282] env[63371]: value = "task-1774782" [ 1789.613282] env[63371]: _type = "Task" [ 1789.613282] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.623608] env[63371]: DEBUG oslo_vmware.api [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774782, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.682859] env[63371]: DEBUG nova.compute.utils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1789.688451] env[63371]: DEBUG nova.compute.manager [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1789.688451] env[63371]: DEBUG nova.network.neutron [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1789.731985] env[63371]: DEBUG nova.policy [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ef97c1a9a174c1888972e6f281eecbe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2601d597b4d64481ace490d56d1056a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1789.851975] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774781, 'name': ReconfigVM_Task, 'duration_secs': 0.342141} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.851975] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 0518c5a8-8cc1-4829-a0cf-5f5904f6df86/0518c5a8-8cc1-4829-a0cf-5f5904f6df86.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1789.852519] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e2af6ce6-28c8-48af-8f10-edd157644784 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.862470] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1789.862470] env[63371]: value = "task-1774783" [ 1789.862470] env[63371]: _type = "Task" [ 1789.862470] env[63371]: } to complete. 
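The "Policy check for network:attach_external_network failed with credentials {...}" entry above is an oslo.policy authorization check against the request credentials. A hedged sketch of that kind of check is below; the default check string and the credential values are assumptions for illustration, not Nova's actual policy defaults.

# Illustrative oslo.policy check; the 'role:admin' default and the credential
# values are placeholders, not Nova's real policy configuration.
from oslo_config import cfg
from oslo_policy import policy

conf = cfg.ConfigOpts()
enforcer = policy.Enforcer(conf)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

# Credentials shaped like the dict logged above (values are placeholders).
creds = {'is_admin': False, 'user_id': 'u1', 'project_id': 'p1',
         'roles': ['member', 'reader']}

# Returns False for these credentials, which the caller logs as a failed check.
allowed = enforcer.enforce('network:attach_external_network', {}, creds)
print(allowed)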
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.867440] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "refresh_cache-1ec21edd-7b7c-4a2b-983f-8aa6c022e033" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1789.867552] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "refresh_cache-1ec21edd-7b7c-4a2b-983f-8aa6c022e033" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1789.868076] env[63371]: DEBUG nova.network.neutron [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1789.885916] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774783, 'name': Rename_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.887356] env[63371]: DEBUG nova.network.neutron [req-71912de3-de96-47a6-a76b-ad366d1de511 req-31fae031-4ac6-4025-a374-a0c85fe6d0f3 service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Updated VIF entry in instance network info cache for port dd28ee6f-5efa-4009-842b-c1c9af10f8ea. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1789.887743] env[63371]: DEBUG nova.network.neutron [req-71912de3-de96-47a6-a76b-ad366d1de511 req-31fae031-4ac6-4025-a374-a0c85fe6d0f3 service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Updating instance_info_cache with network_info: [{"id": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "address": "fa:16:3e:d1:7d:4b", "network": {"id": "1f2162b2-ad92-4b6d-9e1c-02a0d65dd460", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-529696742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40f26a0147d245e59fa8a860280852e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd28ee6f-5e", "ovs_interfaceid": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.036538] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d18aa4c3-98a8-4579-8eaf-101c983c0cb5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.044937] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c4778f-0f43-4342-9064-2bda5cb21476 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.081233] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6935bb90-8e07-4b77-8f52-8d03f25c45fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.090011] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92392170-90a8-4cc2-a66a-b5994a7fece7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.107103] env[63371]: DEBUG nova.compute.provider_tree [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1790.118391] env[63371]: DEBUG nova.network.neutron [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Successfully created port: f5b22240-e8c4-447a-bc92-3a83ae9674ec {{(pid=63371) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 1790.126408] env[63371]: DEBUG oslo_vmware.api [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774782, 'name': PowerOffVM_Task, 'duration_secs': 0.271672} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.126679] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1790.126847] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1790.127112] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4a8b9c7-2519-4828-8ce5-53bbe1b084b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.189224] env[63371]: DEBUG nova.compute.manager [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1790.314688] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1790.314688] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1790.317120] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Deleting the datastore file [datastore1] 6c2edb87-7a36-4814-ac4a-199cdca1ef68 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1790.317120] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ce3d5f36-40a9-4590-a005-7d58a7ec68bc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.322314] env[63371]: DEBUG oslo_vmware.api [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Waiting for the task: (returnval){ [ 1790.322314] env[63371]: value = "task-1774785" [ 1790.322314] env[63371]: _type = "Task" [ 1790.322314] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.332303] env[63371]: DEBUG oslo_vmware.api [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774785, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.371739] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774783, 'name': Rename_Task, 'duration_secs': 0.25925} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.372061] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1790.372320] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29d86968-ba13-457a-a7de-078d68aa0606 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.379239] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1790.379239] env[63371]: value = "task-1774786" [ 1790.379239] env[63371]: _type = "Task" [ 1790.379239] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.387490] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774786, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.393362] env[63371]: DEBUG oslo_concurrency.lockutils [req-71912de3-de96-47a6-a76b-ad366d1de511 req-31fae031-4ac6-4025-a374-a0c85fe6d0f3 service nova] Releasing lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1790.405483] env[63371]: DEBUG nova.network.neutron [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1790.549512] env[63371]: DEBUG nova.network.neutron [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Updating instance_info_cache with network_info: [{"id": "f560031e-f701-4309-aead-34a87be57b22", "address": "fa:16:3e:63:fd:af", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf560031e-f7", "ovs_interfaceid": "f560031e-f701-4309-aead-34a87be57b22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.610611] env[63371]: DEBUG nova.scheduler.client.report [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1790.834638] env[63371]: DEBUG oslo_vmware.api [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Task: {'id': task-1774785, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.342701} completed successfully. 
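The instance_info_cache updates above carry the full network_info model inline. As a reading aid only, here is a trimmed, hypothetical blob with the same shape and the paths to the most useful fields (port ID, MAC, fixed and floating addresses); the values are placeholders, not taken from the log.

# Reading aid for the network_info blobs logged above. Values are placeholders.
network_info = [{
    "id": "00000000-0000-0000-0000-000000000000",   # Neutron port ID
    "address": "fa:16:3e:00:00:01",                  # port MAC address
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.14",         # fixed IP
                "floating_ips": [{"address": "10.0.0.5"}],
            }],
        }],
    },
    "devname": "tap00000000-00",
}]

for vif in network_info:
    fixed = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]]
    floating = [fip["address"]
                for subnet in vif["network"]["subnets"]
                for ip in subnet["ips"]
                for fip in ip.get("floating_ips", [])]
    print(vif["id"], vif["address"], fixed, floating)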
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.834867] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1790.835078] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1790.835261] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1790.835435] env[63371]: INFO nova.compute.manager [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1790.835742] env[63371]: DEBUG oslo.service.loopingcall [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1790.835973] env[63371]: DEBUG nova.compute.manager [-] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1790.836106] env[63371]: DEBUG nova.network.neutron [-] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1790.889560] env[63371]: DEBUG oslo_vmware.api [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774786, 'name': PowerOnVM_Task, 'duration_secs': 0.472084} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.889825] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1790.890049] env[63371]: INFO nova.compute.manager [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Took 8.08 seconds to spawn the instance on the hypervisor. 
[ 1790.890368] env[63371]: DEBUG nova.compute.manager [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1790.891044] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8fb33e-ba83-46e9-ac83-d621c8b50c7c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.053474] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "refresh_cache-1ec21edd-7b7c-4a2b-983f-8aa6c022e033" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.053873] env[63371]: DEBUG nova.compute.manager [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Instance network_info: |[{"id": "f560031e-f701-4309-aead-34a87be57b22", "address": "fa:16:3e:63:fd:af", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf560031e-f7", "ovs_interfaceid": "f560031e-f701-4309-aead-34a87be57b22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1791.054334] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:63:fd:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f560031e-f701-4309-aead-34a87be57b22', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1791.061969] env[63371]: DEBUG oslo.service.loopingcall [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1791.062349] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1791.062463] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed96c1f3-04ff-4851-a59c-27aa7c12edb1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.084718] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1791.084718] env[63371]: value = "task-1774787" [ 1791.084718] env[63371]: _type = "Task" [ 1791.084718] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.093667] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774787, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.115813] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.937s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.120031] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.643s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.120031] env[63371]: DEBUG nova.objects.instance [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lazy-loading 'resources' on Instance uuid cf63c2a2-ee72-464e-944d-5e53ca8635ac {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1791.141653] env[63371]: INFO nova.scheduler.client.report [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Deleted allocations for instance b523486c-adae-4322-80be-1f3bf33ca192 [ 1791.200572] env[63371]: DEBUG nova.compute.manager [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1791.230156] env[63371]: DEBUG nova.virt.hardware [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1791.230390] env[63371]: DEBUG nova.virt.hardware [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1791.230539] env[63371]: DEBUG nova.virt.hardware [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1791.230738] env[63371]: DEBUG nova.virt.hardware [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1791.230911] env[63371]: DEBUG nova.virt.hardware [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1791.231146] env[63371]: DEBUG nova.virt.hardware [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1791.232176] env[63371]: DEBUG nova.virt.hardware [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1791.232176] env[63371]: DEBUG nova.virt.hardware [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1791.232176] env[63371]: DEBUG 
nova.virt.hardware [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1791.232176] env[63371]: DEBUG nova.virt.hardware [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1791.232176] env[63371]: DEBUG nova.virt.hardware [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1791.235778] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11ff955-9a7f-4177-a97a-505a9e8f7149 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.241980] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ee494c-9fb9-48d8-8163-8a63b52e9704 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.259166] env[63371]: DEBUG nova.compute.manager [req-ccff5198-165d-42d7-b5da-b9a027f5bfb7 req-5fb87b7c-d196-41e3-9433-a75aa26ad43c service nova] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Received event network-changed-f560031e-f701-4309-aead-34a87be57b22 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1791.259375] env[63371]: DEBUG nova.compute.manager [req-ccff5198-165d-42d7-b5da-b9a027f5bfb7 req-5fb87b7c-d196-41e3-9433-a75aa26ad43c service nova] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Refreshing instance network info cache due to event network-changed-f560031e-f701-4309-aead-34a87be57b22. 
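The nova.virt.hardware entries above walk from the flavor/image limits (maximum 65536:65536:65536) to a single possible topology for one vCPU. As an illustration only (this is not Nova's actual implementation), enumerating every sockets*cores*threads factorization of the vCPU count under those maxima reproduces the "Got 1 possible topologies ... (cores=1,sockets=1,threads=1)" result:

# Illustrative enumeration of CPU topologies: every sockets*cores*threads
# factorization of the vCPU count that respects the per-dimension maxima.
from dataclasses import dataclass

@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int

def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    found.append(Topology(sockets, cores, threads))
    return found

print(possible_topologies(1, 65536, 65536, 65536))
# -> [Topology(sockets=1, cores=1, threads=1)]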
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1791.259512] env[63371]: DEBUG oslo_concurrency.lockutils [req-ccff5198-165d-42d7-b5da-b9a027f5bfb7 req-5fb87b7c-d196-41e3-9433-a75aa26ad43c service nova] Acquiring lock "refresh_cache-1ec21edd-7b7c-4a2b-983f-8aa6c022e033" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1791.259656] env[63371]: DEBUG oslo_concurrency.lockutils [req-ccff5198-165d-42d7-b5da-b9a027f5bfb7 req-5fb87b7c-d196-41e3-9433-a75aa26ad43c service nova] Acquired lock "refresh_cache-1ec21edd-7b7c-4a2b-983f-8aa6c022e033" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1791.259820] env[63371]: DEBUG nova.network.neutron [req-ccff5198-165d-42d7-b5da-b9a027f5bfb7 req-5fb87b7c-d196-41e3-9433-a75aa26ad43c service nova] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Refreshing network info cache for port f560031e-f701-4309-aead-34a87be57b22 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1791.410254] env[63371]: INFO nova.compute.manager [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Took 26.99 seconds to build instance. [ 1791.547748] env[63371]: DEBUG nova.compute.manager [req-ef81bd74-4eae-499f-841a-2b6bfef06940 req-70fe50f8-955d-41e5-bdac-ebb8ba5aa483 service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Received event network-vif-plugged-f5b22240-e8c4-447a-bc92-3a83ae9674ec {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1791.548145] env[63371]: DEBUG oslo_concurrency.lockutils [req-ef81bd74-4eae-499f-841a-2b6bfef06940 req-70fe50f8-955d-41e5-bdac-ebb8ba5aa483 service nova] Acquiring lock "9885de9e-c640-4d82-a47a-980988d89deb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.548456] env[63371]: DEBUG oslo_concurrency.lockutils [req-ef81bd74-4eae-499f-841a-2b6bfef06940 req-70fe50f8-955d-41e5-bdac-ebb8ba5aa483 service nova] Lock "9885de9e-c640-4d82-a47a-980988d89deb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.548682] env[63371]: DEBUG oslo_concurrency.lockutils [req-ef81bd74-4eae-499f-841a-2b6bfef06940 req-70fe50f8-955d-41e5-bdac-ebb8ba5aa483 service nova] Lock "9885de9e-c640-4d82-a47a-980988d89deb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.548907] env[63371]: DEBUG nova.compute.manager [req-ef81bd74-4eae-499f-841a-2b6bfef06940 req-70fe50f8-955d-41e5-bdac-ebb8ba5aa483 service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] No waiting events found dispatching network-vif-plugged-f5b22240-e8c4-447a-bc92-3a83ae9674ec {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1791.549155] env[63371]: WARNING nova.compute.manager [req-ef81bd74-4eae-499f-841a-2b6bfef06940 req-70fe50f8-955d-41e5-bdac-ebb8ba5aa483 service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] 
Received unexpected event network-vif-plugged-f5b22240-e8c4-447a-bc92-3a83ae9674ec for instance with vm_state building and task_state spawning. [ 1791.581551] env[63371]: DEBUG nova.network.neutron [-] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1791.595146] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774787, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.654182] env[63371]: DEBUG oslo_concurrency.lockutils [None req-64d4a438-dd09-424a-9242-7cdf23452332 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "b523486c-adae-4322-80be-1f3bf33ca192" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.714s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.659775] env[63371]: DEBUG nova.network.neutron [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Successfully updated port: f5b22240-e8c4-447a-bc92-3a83ae9674ec {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1791.891997] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-316a66c4-1c59-429b-803c-26f66bfb83f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.899524] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d1647f-196d-45b0-97f3-4e1a48643605 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.928389] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8f25587-6a8b-49fc-bfb4-b0b6f26c7c44 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.518s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.931188] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fbde2cc-9ef7-4253-b231-7d34a9a5a7eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.939034] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-369abded-fa4e-46ab-a957-8f03836b0b2d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.952382] env[63371]: DEBUG nova.compute.provider_tree [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1792.025685] env[63371]: DEBUG nova.network.neutron [req-ccff5198-165d-42d7-b5da-b9a027f5bfb7 
req-5fb87b7c-d196-41e3-9433-a75aa26ad43c service nova] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Updated VIF entry in instance network info cache for port f560031e-f701-4309-aead-34a87be57b22. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1792.026071] env[63371]: DEBUG nova.network.neutron [req-ccff5198-165d-42d7-b5da-b9a027f5bfb7 req-5fb87b7c-d196-41e3-9433-a75aa26ad43c service nova] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Updating instance_info_cache with network_info: [{"id": "f560031e-f701-4309-aead-34a87be57b22", "address": "fa:16:3e:63:fd:af", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf560031e-f7", "ovs_interfaceid": "f560031e-f701-4309-aead-34a87be57b22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.083953] env[63371]: INFO nova.compute.manager [-] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Took 1.25 seconds to deallocate network for instance. [ 1792.098029] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774787, 'name': CreateVM_Task, 'duration_secs': 0.754283} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.098029] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1792.098392] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.098557] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.098878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1792.099154] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1ca81fe-6559-4508-a48a-df56a93d2b12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.104169] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1792.104169] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520f2373-fc16-68eb-fc0f-b12895ffb224" [ 1792.104169] env[63371]: _type = "Task" [ 1792.104169] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.113353] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520f2373-fc16-68eb-fc0f-b12895ffb224, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.162190] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "refresh_cache-9885de9e-c640-4d82-a47a-980988d89deb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.162450] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "refresh_cache-9885de9e-c640-4d82-a47a-980988d89deb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.162498] env[63371]: DEBUG nova.network.neutron [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1792.455510] env[63371]: DEBUG nova.scheduler.client.report [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1792.529870] env[63371]: DEBUG oslo_concurrency.lockutils [req-ccff5198-165d-42d7-b5da-b9a027f5bfb7 req-5fb87b7c-d196-41e3-9433-a75aa26ad43c service nova] Releasing lock "refresh_cache-1ec21edd-7b7c-4a2b-983f-8aa6c022e033" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.529870] env[63371]: DEBUG nova.compute.manager [req-ccff5198-165d-42d7-b5da-b9a027f5bfb7 req-5fb87b7c-d196-41e3-9433-a75aa26ad43c service nova] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Received event network-vif-deleted-d94a7cdb-218d-45c4-98f7-d395d584d9c7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1792.529870] env[63371]: INFO nova.compute.manager [req-ccff5198-165d-42d7-b5da-b9a027f5bfb7 req-5fb87b7c-d196-41e3-9433-a75aa26ad43c service nova] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Neutron deleted interface d94a7cdb-218d-45c4-98f7-d395d584d9c7; detaching it from the instance and deleting it from the info cache [ 1792.529870] env[63371]: DEBUG nova.network.neutron [req-ccff5198-165d-42d7-b5da-b9a027f5bfb7 req-5fb87b7c-d196-41e3-9433-a75aa26ad43c service nova] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.593550] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 
tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1792.615250] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520f2373-fc16-68eb-fc0f-b12895ffb224, 'name': SearchDatastore_Task, 'duration_secs': 0.009715} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.615625] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.615918] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1792.616048] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.616205] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.616391] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1792.616638] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a741ce88-4b82-4616-97ec-6b460817958a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.625044] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1792.625238] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None 
req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1792.625941] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f66a039-b67a-47cc-b436-7761917b5018 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.630865] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1792.630865] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5278373e-02b0-fe49-7f81-8f5156e7be2b" [ 1792.630865] env[63371]: _type = "Task" [ 1792.630865] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.638560] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5278373e-02b0-fe49-7f81-8f5156e7be2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.729634] env[63371]: DEBUG nova.network.neutron [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1792.886658] env[63371]: DEBUG nova.network.neutron [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Updating instance_info_cache with network_info: [{"id": "f5b22240-e8c4-447a-bc92-3a83ae9674ec", "address": "fa:16:3e:bf:a3:51", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5b22240-e8", "ovs_interfaceid": "f5b22240-e8c4-447a-bc92-3a83ae9674ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1792.960741] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.842s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.962526] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 22.201s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1792.983534] env[63371]: INFO nova.scheduler.client.report [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted allocations for instance cf63c2a2-ee72-464e-944d-5e53ca8635ac [ 1793.033235] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6a0b4f2-fb91-4d1f-88f5-5e0a01a21189 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.043620] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b7c4e90-31a4-4d96-95bd-1f3e28e83651 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.075807] env[63371]: DEBUG nova.compute.manager [req-ccff5198-165d-42d7-b5da-b9a027f5bfb7 req-5fb87b7c-d196-41e3-9433-a75aa26ad43c service nova] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Detach interface failed, 
port_id=d94a7cdb-218d-45c4-98f7-d395d584d9c7, reason: Instance 6c2edb87-7a36-4814-ac4a-199cdca1ef68 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1793.142653] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5278373e-02b0-fe49-7f81-8f5156e7be2b, 'name': SearchDatastore_Task, 'duration_secs': 0.009398} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.143510] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52e250a5-706d-4c9c-9a54-7db5886e062e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.149046] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1793.149046] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce6457-a9ac-63bc-2bd6-ba98b2107cea" [ 1793.149046] env[63371]: _type = "Task" [ 1793.149046] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.157742] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce6457-a9ac-63bc-2bd6-ba98b2107cea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.390210] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "refresh_cache-9885de9e-c640-4d82-a47a-980988d89deb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.390560] env[63371]: DEBUG nova.compute.manager [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Instance network_info: |[{"id": "f5b22240-e8c4-447a-bc92-3a83ae9674ec", "address": "fa:16:3e:bf:a3:51", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5b22240-e8", "ovs_interfaceid": "f5b22240-e8c4-447a-bc92-3a83ae9674ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1793.391230] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:a3:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6fb0104-186b-4288-b87e-634893f46f01', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5b22240-e8c4-447a-bc92-3a83ae9674ec', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1793.398821] env[63371]: DEBUG oslo.service.loopingcall [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1793.399097] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1793.399668] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc734f49-82f3-4dff-af4d-9e37bb3037a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.422895] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1793.422895] env[63371]: value = "task-1774788" [ 1793.422895] env[63371]: _type = "Task" [ 1793.422895] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.492131] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7fddd5a8-25af-427a-8999-9dddda585488 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "cf63c2a2-ee72-464e-944d-5e53ca8635ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.946s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1793.664208] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ce6457-a9ac-63bc-2bd6-ba98b2107cea, 'name': SearchDatastore_Task, 'duration_secs': 0.010207} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.664540] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1793.664846] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1ec21edd-7b7c-4a2b-983f-8aa6c022e033/1ec21edd-7b7c-4a2b-983f-8aa6c022e033.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1793.665156] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8ccb0e5-084e-4484-8d14-e80144f919cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.672434] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1793.672434] env[63371]: value = "task-1774789" [ 1793.672434] env[63371]: _type = "Task" [ 1793.672434] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.680892] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774789, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.713420] env[63371]: DEBUG nova.compute.manager [req-f08226df-e877-475b-8683-bac48a617e89 req-cdc64b63-fe56-40da-83ab-3ad117e81502 service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Received event network-changed-f5b22240-e8c4-447a-bc92-3a83ae9674ec {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1793.713577] env[63371]: DEBUG nova.compute.manager [req-f08226df-e877-475b-8683-bac48a617e89 req-cdc64b63-fe56-40da-83ab-3ad117e81502 service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Refreshing instance network info cache due to event network-changed-f5b22240-e8c4-447a-bc92-3a83ae9674ec. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1793.713765] env[63371]: DEBUG oslo_concurrency.lockutils [req-f08226df-e877-475b-8683-bac48a617e89 req-cdc64b63-fe56-40da-83ab-3ad117e81502 service nova] Acquiring lock "refresh_cache-9885de9e-c640-4d82-a47a-980988d89deb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.713937] env[63371]: DEBUG oslo_concurrency.lockutils [req-f08226df-e877-475b-8683-bac48a617e89 req-cdc64b63-fe56-40da-83ab-3ad117e81502 service nova] Acquired lock "refresh_cache-9885de9e-c640-4d82-a47a-980988d89deb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.714312] env[63371]: DEBUG nova.network.neutron [req-f08226df-e877-475b-8683-bac48a617e89 req-cdc64b63-fe56-40da-83ab-3ad117e81502 service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Refreshing network info cache for port f5b22240-e8c4-447a-bc92-3a83ae9674ec {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1793.756467] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1de95f-3482-45a9-a06b-504b23e64874 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.765566] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3fe662-513b-4119-ab1c-89efa3f5b924 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.795509] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24dd359a-7695-4345-8107-4a800dd4af6d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.803127] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f62aad-b51a-401a-987f-c753d6e41047 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.818882] env[63371]: DEBUG nova.compute.provider_tree [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory 
has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1793.933418] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774788, 'name': CreateVM_Task, 'duration_secs': 0.387443} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.933607] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1793.934378] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.934558] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.934896] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1793.935197] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0d6fbd6-45cd-4989-a13f-a1a3951677d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.944984] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1793.944984] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525978f0-5c39-2b5b-7dcd-4050adfedda1" [ 1793.944984] env[63371]: _type = "Task" [ 1793.944984] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.956225] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525978f0-5c39-2b5b-7dcd-4050adfedda1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.182908] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774789, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.247093] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.247323] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.322131] env[63371]: DEBUG nova.scheduler.client.report [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1794.420414] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.421045] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.424146] env[63371]: DEBUG nova.network.neutron [req-f08226df-e877-475b-8683-bac48a617e89 req-cdc64b63-fe56-40da-83ab-3ad117e81502 service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Updated VIF entry in instance network info cache for port f5b22240-e8c4-447a-bc92-3a83ae9674ec. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1794.424480] env[63371]: DEBUG nova.network.neutron [req-f08226df-e877-475b-8683-bac48a617e89 req-cdc64b63-fe56-40da-83ab-3ad117e81502 service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Updating instance_info_cache with network_info: [{"id": "f5b22240-e8c4-447a-bc92-3a83ae9674ec", "address": "fa:16:3e:bf:a3:51", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5b22240-e8", "ovs_interfaceid": "f5b22240-e8c4-447a-bc92-3a83ae9674ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1794.455028] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525978f0-5c39-2b5b-7dcd-4050adfedda1, 'name': SearchDatastore_Task, 'duration_secs': 0.073262} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.455028] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.455168] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1794.456020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1794.456020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1794.456020] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1794.456255] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-50e011c8-3e8a-4a28-b9c4-24559da0e80d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.469243] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1794.469423] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1794.470132] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-798e36dd-8359-49dd-8016-71ea28c770c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.475429] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1794.475429] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52bd4fb2-15d7-5d2a-b00e-e0e0a9405164" [ 1794.475429] env[63371]: _type = "Task" [ 1794.475429] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.482890] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bd4fb2-15d7-5d2a-b00e-e0e0a9405164, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.683801] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774789, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531563} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.684068] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1ec21edd-7b7c-4a2b-983f-8aa6c022e033/1ec21edd-7b7c-4a2b-983f-8aa6c022e033.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1794.684288] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1794.684531] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed60cad1-ade7-4126-960a-4c61eea98b07 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.691151] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1794.691151] env[63371]: value = "task-1774790" [ 1794.691151] env[63371]: _type = "Task" [ 1794.691151] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.698727] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774790, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.749407] env[63371]: DEBUG nova.compute.manager [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1794.924054] env[63371]: DEBUG nova.compute.manager [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1794.927040] env[63371]: DEBUG oslo_concurrency.lockutils [req-f08226df-e877-475b-8683-bac48a617e89 req-cdc64b63-fe56-40da-83ab-3ad117e81502 service nova] Releasing lock "refresh_cache-9885de9e-c640-4d82-a47a-980988d89deb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.986809] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52bd4fb2-15d7-5d2a-b00e-e0e0a9405164, 'name': SearchDatastore_Task, 'duration_secs': 0.093541} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.987688] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e1a067b-cf7e-43e1-8dec-3007d7c27d5c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.993516] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1794.993516] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b0fa9b-26f6-23e3-102c-51daf3513bb4" [ 1794.993516] env[63371]: _type = "Task" [ 1794.993516] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.002458] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b0fa9b-26f6-23e3-102c-51daf3513bb4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.201169] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774790, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064093} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.201442] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1795.202339] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d6dcb0-30d1-4063-bc93-65ca9890d36f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.224590] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 1ec21edd-7b7c-4a2b-983f-8aa6c022e033/1ec21edd-7b7c-4a2b-983f-8aa6c022e033.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1795.225152] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a773a2fa-3b14-47d8-a7b3-91052e30ca2b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.246156] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1795.246156] env[63371]: value = "task-1774791" [ 1795.246156] env[63371]: _type = "Task" [ 1795.246156] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.256471] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774791, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.274843] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.332932] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.370s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.332932] env[63371]: DEBUG nova.compute.manager [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=63371) _confirm_resize /opt/stack/nova/nova/compute/manager.py:4910}} [ 1795.335844] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.206s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.336074] env[63371]: DEBUG nova.objects.instance [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lazy-loading 'resources' on Instance uuid 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1795.446898] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1795.504936] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b0fa9b-26f6-23e3-102c-51daf3513bb4, 'name': SearchDatastore_Task, 'duration_secs': 0.017491} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.505235] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.505488] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9885de9e-c640-4d82-a47a-980988d89deb/9885de9e-c640-4d82-a47a-980988d89deb.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1795.505746] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86253c31-fd89-4e94-a1d6-d7081e15222c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.512658] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1795.512658] env[63371]: value = "task-1774792" [ 1795.512658] env[63371]: _type = "Task" [ 1795.512658] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.520593] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774792, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.755906] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774791, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.903448] env[63371]: INFO nova.scheduler.client.report [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleted allocation for migration c0049d9e-3f16-4dab-89a1-5e74800f317c [ 1796.026819] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774792, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.193439] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0abbd0-b07c-43a3-9513-94af68c07424 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.201363] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd77adef-2365-4887-8bb8-887db53a232c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.231874] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8639fc85-9e1a-4de6-9d34-9b1577a69427 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.240112] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6844fa01-d0b0-4c3e-a522-58d684110143 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.255285] env[63371]: DEBUG nova.compute.provider_tree [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1796.265035] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774791, 'name': ReconfigVM_Task, 'duration_secs': 0.808639} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.265667] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 1ec21edd-7b7c-4a2b-983f-8aa6c022e033/1ec21edd-7b7c-4a2b-983f-8aa6c022e033.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1796.266321] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54018a11-9aa1-4f57-968d-88ebea95befd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.272738] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1796.272738] env[63371]: value = "task-1774793" [ 1796.272738] env[63371]: _type = "Task" [ 1796.272738] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.276252] env[63371]: DEBUG nova.objects.instance [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'flavor' on Instance uuid 9862b0f0-ccf6-4e69-9e78-cf864adaa65e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1796.283227] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774793, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.414814] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0d539a29-9825-45f3-9a31-d2e13100355f tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 29.037s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1796.526129] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774792, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.703749} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.526472] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9885de9e-c640-4d82-a47a-980988d89deb/9885de9e-c640-4d82-a47a-980988d89deb.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1796.526606] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1796.526876] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9941677c-5875-4556-a833-933cac2ad43e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.532671] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1796.532671] env[63371]: value = "task-1774794" [ 1796.532671] env[63371]: _type = "Task" [ 1796.532671] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.540284] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774794, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.761026] env[63371]: DEBUG nova.scheduler.client.report [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1796.783742] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774793, 'name': Rename_Task, 'duration_secs': 0.391839} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.784212] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.784367] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.784534] env[63371]: DEBUG nova.network.neutron [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1796.784700] env[63371]: DEBUG nova.objects.instance [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'info_cache' on Instance uuid 9862b0f0-ccf6-4e69-9e78-cf864adaa65e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1796.785784] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1796.786350] env[63371]: DEBUG oslo_vmware.service [-] 
Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7351f81f-8077-434b-84ac-2051b57c9bf7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.792651] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1796.792651] env[63371]: value = "task-1774795" [ 1796.792651] env[63371]: _type = "Task" [ 1796.792651] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.800618] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774795, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.043620] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774794, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.138018} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.043916] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1797.044733] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19def1dd-8352-49f0-9e48-34a265ae2367 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.066832] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 9885de9e-c640-4d82-a47a-980988d89deb/9885de9e-c640-4d82-a47a-980988d89deb.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1797.067401] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-205fa6ee-fd15-41fc-8447-66d047fcea09 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.087320] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1797.087320] env[63371]: value = "task-1774796" [ 1797.087320] env[63371]: _type = "Task" [ 1797.087320] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.095447] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774796, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.266020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.930s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.268311] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 22.022s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.268502] env[63371]: DEBUG nova.objects.instance [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63371) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1797.282790] env[63371]: INFO nova.scheduler.client.report [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted allocations for instance 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8 [ 1797.287557] env[63371]: DEBUG nova.objects.base [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Object Instance<9862b0f0-ccf6-4e69-9e78-cf864adaa65e> lazy-loaded attributes: flavor,info_cache {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1797.303512] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774795, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.597400] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774796, 'name': ReconfigVM_Task, 'duration_secs': 0.31359} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.597678] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 9885de9e-c640-4d82-a47a-980988d89deb/9885de9e-c640-4d82-a47a-980988d89deb.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1797.598330] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b7eb7c7-a657-40a6-956e-0c8bff0389d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.604919] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1797.604919] env[63371]: value = "task-1774797" [ 1797.604919] env[63371]: _type = "Task" [ 1797.604919] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.612506] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774797, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.794069] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d3f0bfc1-b844-45f1-a6aa-5228ba6ec387 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "0c8c6997-bec8-4a3b-80cf-cbf35f3843f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.109s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.806431] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774795, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.114721] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774797, 'name': Rename_Task, 'duration_secs': 0.168357} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.115015] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1798.115275] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4c07b1c-9195-445a-b3a6-ed333e47cec8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.119584] env[63371]: DEBUG nova.network.neutron [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance_info_cache with network_info: [{"id": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "address": "fa:16:3e:50:09:23", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap82aece5e-dc", "ovs_interfaceid": "82aece5e-dc40-4c18-a1a9-4b4e859fef2a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1798.122216] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1798.122216] env[63371]: value = "task-1774798" [ 1798.122216] env[63371]: _type = "Task" [ 1798.122216] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.129953] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774798, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.277347] env[63371]: DEBUG oslo_concurrency.lockutils [None req-76905475-5e1e-4b19-9fd3-5decbebfca9b tempest-ServersAdmin275Test-1459298935 tempest-ServersAdmin275Test-1459298935-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.278531] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.278757] env[63371]: DEBUG nova.objects.instance [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lazy-loading 'resources' on Instance uuid f391d4f3-6e9d-4ddc-918a-8dc8581dfc00 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1798.305049] env[63371]: DEBUG oslo_vmware.api [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774795, 'name': PowerOnVM_Task, 'duration_secs': 1.205513} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.305304] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1798.305503] env[63371]: INFO nova.compute.manager [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Took 9.51 seconds to spawn the instance on the hypervisor. 
[ 1798.305677] env[63371]: DEBUG nova.compute.manager [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1798.306522] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2402aa-004b-4970-84db-8c1b403fe61b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.623658] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-9862b0f0-ccf6-4e69-9e78-cf864adaa65e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.634426] env[63371]: DEBUG oslo_vmware.api [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774798, 'name': PowerOnVM_Task, 'duration_secs': 0.445} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.635287] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1798.635494] env[63371]: INFO nova.compute.manager [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Took 7.43 seconds to spawn the instance on the hypervisor. 
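Editor's note: the "Acquiring lock ... by ... / Lock ... acquired ... :: waited N s / Lock ... "released" ... :: held N s" DEBUG lines above (e.g. around "compute_resources") are emitted by oslo.concurrency's locking helpers. The following is a minimal sketch of the calling pattern that produces them, not code taken from this deployment; the lock name and function names are illustrative only.

```python
# Sketch only: the oslo.concurrency locking pattern behind the
# "Acquiring lock ... / acquired ... waited / released ... held" DEBUG lines.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Critical section: one caller at a time holds "compute_resources";
    # lockutils logs how long the caller waited and how long it held the lock.
    pass

# Equivalent explicit form using the context manager:
def instance_claim():
    with lockutils.lock('compute_resources'):
        pass
```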
[ 1798.635670] env[63371]: DEBUG nova.compute.manager [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1798.636438] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a422dd-0698-4f28-bb62-6c14440f9b5d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.760117] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "1689fc63-3c07-4517-bbef-0011d860e9fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.760349] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.823589] env[63371]: INFO nova.compute.manager [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Took 32.42 seconds to build instance. 
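Editor's note: the recurring "Invoking VirtualMachine.PowerOnVM_Task ... / Waiting for the task: (returnval){ value = "task-..." } to complete / progress is N% / completed successfully" sequences above come from oslo.vmware's call-and-poll task handling. Below is a minimal sketch of how a caller drives that pattern, assuming an established VMwareAPISession; the host, credentials and vm_ref are placeholders, not values from this log.

```python
# Sketch only (assumed placeholders): oslo.vmware's invoke-and-wait pattern.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# In real code this would be a VirtualMachine managed-object reference
# looked up beforehand (e.g. via a PropertyCollector query).
vm_ref = None

# invoke_api() issues the SOAP call (the "Invoking ... with opID=..." lines);
# wait_for_task() polls the returned Task object until it reports success
# or raises on error (the "progress is N%" / "completed successfully" lines).
task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task)
```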
[ 1799.021055] env[63371]: INFO nova.compute.manager [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Rescuing [ 1799.021323] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "refresh_cache-1ec21edd-7b7c-4a2b-983f-8aa6c022e033" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.021473] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "refresh_cache-1ec21edd-7b7c-4a2b-983f-8aa6c022e033" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.021633] env[63371]: DEBUG nova.network.neutron [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1799.024901] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4ba5c00-3906-44c8-9f00-abdb150d4511 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.032609] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24cdf491-fb02-4972-9311-b04e6d4eb3a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.063751] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19802825-930d-4fea-9728-d0cd99d3ef0f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.071442] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-342b1e79-61c2-4f17-a5a1-f5a2cc94b507 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.085884] env[63371]: DEBUG nova.compute.provider_tree [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1799.130335] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1799.130589] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9118c939-38dc-47b0-9252-543d01067367 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.138336] 
env[63371]: DEBUG oslo_vmware.api [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1799.138336] env[63371]: value = "task-1774799" [ 1799.138336] env[63371]: _type = "Task" [ 1799.138336] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.150394] env[63371]: DEBUG oslo_vmware.api [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774799, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.152571] env[63371]: INFO nova.compute.manager [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Took 30.99 seconds to build instance. [ 1799.262402] env[63371]: DEBUG nova.compute.manager [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1799.325579] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3dc94655-c67d-4e85-9c95-15e1df8a35ae tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.934s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.588641] env[63371]: DEBUG nova.scheduler.client.report [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1799.652740] env[63371]: DEBUG oslo_vmware.api [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774799, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.655343] env[63371]: DEBUG oslo_concurrency.lockutils [None req-985182dc-4a79-4e00-9ba3-8abd4e173503 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "9885de9e-c640-4d82-a47a-980988d89deb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.505s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1799.781881] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1799.806816] env[63371]: DEBUG nova.network.neutron [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Updating instance_info_cache with network_info: [{"id": "f560031e-f701-4309-aead-34a87be57b22", "address": "fa:16:3e:63:fd:af", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf560031e-f7", "ovs_interfaceid": "f560031e-f701-4309-aead-34a87be57b22", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.094129] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.815s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.096933] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.633s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.097177] env[63371]: DEBUG nova.objects.instance [None req-83202955-a83e-4d58-be36-860d7e1b418e 
tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Lazy-loading 'resources' on Instance uuid 943e2506-03a4-4633-b55b-381d9d8d9ef6 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1800.118725] env[63371]: INFO nova.scheduler.client.report [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Deleted allocations for instance f391d4f3-6e9d-4ddc-918a-8dc8581dfc00 [ 1800.149048] env[63371]: DEBUG oslo_vmware.api [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774799, 'name': PowerOnVM_Task, 'duration_secs': 0.814172} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.149233] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1800.149422] env[63371]: DEBUG nova.compute.manager [None req-ff1b79be-b43c-4617-98a5-89b792524650 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1800.150238] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4acf85-8f71-4e93-80bf-d9ec6618def1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.270656] env[63371]: DEBUG nova.compute.manager [req-db1d74cf-058c-4fd6-ab2a-7d71b9a03b65 req-98a7592e-370e-40f3-9d7a-41e6191fa11a service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Received event network-changed-f5b22240-e8c4-447a-bc92-3a83ae9674ec {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1800.270934] env[63371]: DEBUG nova.compute.manager [req-db1d74cf-058c-4fd6-ab2a-7d71b9a03b65 req-98a7592e-370e-40f3-9d7a-41e6191fa11a service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Refreshing instance network info cache due to event network-changed-f5b22240-e8c4-447a-bc92-3a83ae9674ec. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1800.271222] env[63371]: DEBUG oslo_concurrency.lockutils [req-db1d74cf-058c-4fd6-ab2a-7d71b9a03b65 req-98a7592e-370e-40f3-9d7a-41e6191fa11a service nova] Acquiring lock "refresh_cache-9885de9e-c640-4d82-a47a-980988d89deb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1800.271423] env[63371]: DEBUG oslo_concurrency.lockutils [req-db1d74cf-058c-4fd6-ab2a-7d71b9a03b65 req-98a7592e-370e-40f3-9d7a-41e6191fa11a service nova] Acquired lock "refresh_cache-9885de9e-c640-4d82-a47a-980988d89deb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1800.271624] env[63371]: DEBUG nova.network.neutron [req-db1d74cf-058c-4fd6-ab2a-7d71b9a03b65 req-98a7592e-370e-40f3-9d7a-41e6191fa11a service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Refreshing network info cache for port f5b22240-e8c4-447a-bc92-3a83ae9674ec {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1800.309261] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "refresh_cache-1ec21edd-7b7c-4a2b-983f-8aa6c022e033" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.628162] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fc46ad04-b8a8-41f8-abec-668b7e9d9be1 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "f391d4f3-6e9d-4ddc-918a-8dc8581dfc00" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.587s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.842403] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1800.843431] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ad432cc-545d-4aea-99a0-819bcab343bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.856025] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1800.856025] env[63371]: value = "task-1774800" [ 1800.856025] env[63371]: _type = "Task" [ 1800.856025] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.873911] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774800, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.881208] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661817e6-f14b-489a-b67e-befddf89a7a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.889101] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503420b7-f6d7-47a3-a35a-d692c7a5b158 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.922883] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edc52aa2-0a5b-4542-a255-5e71ba197150 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.931054] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff36fa7e-0bfb-4638-8daa-2330b9a7cbea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.945497] env[63371]: DEBUG nova.compute.provider_tree [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1800.976023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.976382] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.976596] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.976834] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.977075] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6145617b-3940-4531-a793-9b95c2f40970 
tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.979139] env[63371]: INFO nova.compute.manager [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Terminating instance [ 1800.981033] env[63371]: DEBUG nova.compute.manager [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1800.981249] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1800.982174] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4b8683-3f2a-450e-91d5-f972b84c9caf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.990199] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1800.990489] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c65ba86c-42a5-4a04-b7a0-ac2cc85832ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.000257] env[63371]: DEBUG oslo_vmware.api [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1801.000257] env[63371]: value = "task-1774801" [ 1801.000257] env[63371]: _type = "Task" [ 1801.000257] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.008132] env[63371]: DEBUG oslo_vmware.api [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774801, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.040275] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "11527051-7a4f-481a-b5ed-14550c550c4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.040547] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "11527051-7a4f-481a-b5ed-14550c550c4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.040740] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "11527051-7a4f-481a-b5ed-14550c550c4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.040944] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "11527051-7a4f-481a-b5ed-14550c550c4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.041140] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "11527051-7a4f-481a-b5ed-14550c550c4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.044118] env[63371]: INFO nova.compute.manager [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Terminating instance [ 1801.045873] env[63371]: DEBUG nova.compute.manager [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1801.046093] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1801.046889] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63837e9f-f9e0-47df-b58f-923503f8080d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.054358] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1801.054586] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-06b7923a-0226-49e7-a6f3-1aefcdde56a1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.060593] env[63371]: DEBUG oslo_vmware.api [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1801.060593] env[63371]: value = "task-1774802" [ 1801.060593] env[63371]: _type = "Task" [ 1801.060593] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.065851] env[63371]: DEBUG nova.network.neutron [req-db1d74cf-058c-4fd6-ab2a-7d71b9a03b65 req-98a7592e-370e-40f3-9d7a-41e6191fa11a service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Updated VIF entry in instance network info cache for port f5b22240-e8c4-447a-bc92-3a83ae9674ec. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1801.066220] env[63371]: DEBUG nova.network.neutron [req-db1d74cf-058c-4fd6-ab2a-7d71b9a03b65 req-98a7592e-370e-40f3-9d7a-41e6191fa11a service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Updating instance_info_cache with network_info: [{"id": "f5b22240-e8c4-447a-bc92-3a83ae9674ec", "address": "fa:16:3e:bf:a3:51", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5b22240-e8", "ovs_interfaceid": "f5b22240-e8c4-447a-bc92-3a83ae9674ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1801.070180] env[63371]: DEBUG oslo_vmware.api [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774802, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.364053] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774800, 'name': PowerOffVM_Task, 'duration_secs': 0.35235} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.364053] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1801.364811] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aabe393d-3a96-44f3-b465-f9c51e7bea2d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.385821] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8f3e764-ebb9-4888-878d-24d7b8f1d13f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.420792] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1801.421158] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1bd92248-17cb-4fc9-9111-7e1cbb256a40 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.429180] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1801.429180] env[63371]: value = "task-1774803" [ 1801.429180] env[63371]: _type = "Task" [ 1801.429180] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.437767] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1801.437975] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1801.438241] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.438389] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.438590] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1801.438812] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12ce75f2-8e92-4c58-9369-6eb37ea229b7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.446528] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1801.446706] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1801.447433] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dabef6cc-e58e-484e-a2e4-67818728d3c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.450371] env[63371]: DEBUG nova.scheduler.client.report [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1801.456577] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1801.456577] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528f2630-3119-10aa-3c92-6e1834e059bf" [ 1801.456577] env[63371]: _type = "Task" [ 1801.456577] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.464692] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528f2630-3119-10aa-3c92-6e1834e059bf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.510011] env[63371]: DEBUG oslo_vmware.api [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774801, 'name': PowerOffVM_Task, 'duration_secs': 0.211721} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.510300] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1801.510462] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1801.510710] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a83325af-199a-4ee6-8243-c8559d3be0f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.571935] env[63371]: DEBUG oslo_concurrency.lockutils [req-db1d74cf-058c-4fd6-ab2a-7d71b9a03b65 req-98a7592e-370e-40f3-9d7a-41e6191fa11a service nova] Releasing lock "refresh_cache-9885de9e-c640-4d82-a47a-980988d89deb" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1801.572401] env[63371]: DEBUG oslo_vmware.api [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774802, 'name': PowerOffVM_Task, 'duration_secs': 0.267479} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.572635] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1801.572796] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1801.573084] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0fd966aa-a0b4-4d93-9045-51f27b86185f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.587226] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1801.587443] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1801.587623] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleting the datastore file [datastore1] 9862b0f0-ccf6-4e69-9e78-cf864adaa65e {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1801.587875] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-561cdfcc-9ffb-41fd-96d1-b25ef9b0a3fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.594375] env[63371]: DEBUG oslo_vmware.api [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1801.594375] env[63371]: value = "task-1774806" [ 1801.594375] env[63371]: _type = "Task" [ 1801.594375] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.603929] env[63371]: DEBUG oslo_vmware.api [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774806, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.722753] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1801.723038] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1801.723247] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Deleting the datastore file [datastore1] 11527051-7a4f-481a-b5ed-14550c550c4e {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1801.723550] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aef36f00-26bc-45f6-b1bc-d5239830ea60 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.730056] env[63371]: DEBUG oslo_vmware.api [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for the task: (returnval){ [ 1801.730056] env[63371]: value = "task-1774807" [ 1801.730056] env[63371]: _type = "Task" [ 1801.730056] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.738970] env[63371]: DEBUG oslo_vmware.api [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774807, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.958585] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.861s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.960703] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.377s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.962303] env[63371]: INFO nova.compute.claims [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1801.974849] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528f2630-3119-10aa-3c92-6e1834e059bf, 'name': SearchDatastore_Task, 'duration_secs': 0.009665} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.976024] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-805c2e74-673c-4dda-8302-546c588320f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.981045] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1801.981045] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b8685e-fd9a-e979-e2e4-cf2a0799f4dc" [ 1801.981045] env[63371]: _type = "Task" [ 1801.981045] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.981715] env[63371]: INFO nova.scheduler.client.report [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Deleted allocations for instance 943e2506-03a4-4633-b55b-381d9d8d9ef6 [ 1801.993815] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b8685e-fd9a-e979-e2e4-cf2a0799f4dc, 'name': SearchDatastore_Task, 'duration_secs': 0.009242} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.994123] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1801.994402] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1ec21edd-7b7c-4a2b-983f-8aa6c022e033/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. {{(pid=63371) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1801.994661] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df03e5d9-0c45-4095-a0b2-ab4849c63bab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.000509] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1802.000509] env[63371]: value = "task-1774808" [ 1802.000509] env[63371]: _type = "Task" [ 1802.000509] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1802.009120] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.104154] env[63371]: DEBUG oslo_vmware.api [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774806, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173186} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.104415] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1802.104596] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1802.104767] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1802.104938] env[63371]: INFO nova.compute.manager [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1802.105195] env[63371]: DEBUG oslo.service.loopingcall [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1802.105381] env[63371]: DEBUG nova.compute.manager [-] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1802.105474] env[63371]: DEBUG nova.network.neutron [-] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1802.246268] env[63371]: DEBUG oslo_vmware.api [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Task: {'id': task-1774807, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151496} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.246677] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1802.246955] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1802.247254] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1802.247524] env[63371]: INFO nova.compute.manager [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1802.247874] env[63371]: DEBUG oslo.service.loopingcall [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1802.248176] env[63371]: DEBUG nova.compute.manager [-] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1802.248329] env[63371]: DEBUG nova.network.neutron [-] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1802.465757] env[63371]: DEBUG nova.compute.manager [req-d34b0fe9-1a53-4323-9d02-b4e24b5c51ec req-7c5fd097-6d99-4dc1-90d8-b406146dfcf6 service nova] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Received event network-vif-deleted-5d6f97e2-eb9c-468d-8931-77a4c10ff125 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1802.465945] env[63371]: INFO nova.compute.manager [req-d34b0fe9-1a53-4323-9d02-b4e24b5c51ec req-7c5fd097-6d99-4dc1-90d8-b406146dfcf6 service nova] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Neutron deleted interface 5d6f97e2-eb9c-468d-8931-77a4c10ff125; detaching it from the instance and deleting it from the info cache [ 1802.466138] env[63371]: DEBUG nova.network.neutron [req-d34b0fe9-1a53-4323-9d02-b4e24b5c51ec req-7c5fd097-6d99-4dc1-90d8-b406146dfcf6 service nova] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.492226] env[63371]: DEBUG oslo_concurrency.lockutils [None req-83202955-a83e-4d58-be36-860d7e1b418e tempest-ServerTagsTestJSON-2007345903 tempest-ServerTagsTestJSON-2007345903-project-member] Lock "943e2506-03a4-4633-b55b-381d9d8d9ef6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.059s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1802.511598] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774808, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.591646] env[63371]: DEBUG nova.compute.manager [req-e1f84419-49e5-44db-9b1b-fa9f01ab0a57 req-6153084b-c5ff-43f9-8491-0f9a9a59a275 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Received event network-vif-deleted-82aece5e-dc40-4c18-a1a9-4b4e859fef2a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1802.591829] env[63371]: INFO nova.compute.manager [req-e1f84419-49e5-44db-9b1b-fa9f01ab0a57 req-6153084b-c5ff-43f9-8491-0f9a9a59a275 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Neutron deleted interface 82aece5e-dc40-4c18-a1a9-4b4e859fef2a; detaching it from the instance and deleting it from the info cache [ 1802.592237] env[63371]: DEBUG nova.network.neutron [req-e1f84419-49e5-44db-9b1b-fa9f01ab0a57 req-6153084b-c5ff-43f9-8491-0f9a9a59a275 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.944370] env[63371]: DEBUG nova.network.neutron [-] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1802.973428] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6e19faa6-3c0c-44ff-89d9-16023711eec0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.988021] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff15d3c-b08b-4143-a370-26ccdf494a92 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.010037] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.833397} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.010354] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1ec21edd-7b7c-4a2b-983f-8aa6c022e033/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. [ 1803.021176] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a07fa948-3ca6-4321-afaf-12fc2bbf6036 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.023800] env[63371]: DEBUG nova.compute.manager [req-d34b0fe9-1a53-4323-9d02-b4e24b5c51ec req-7c5fd097-6d99-4dc1-90d8-b406146dfcf6 service nova] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Detach interface failed, port_id=5d6f97e2-eb9c-468d-8931-77a4c10ff125, reason: Instance 11527051-7a4f-481a-b5ed-14550c550c4e could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1803.048544] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 1ec21edd-7b7c-4a2b-983f-8aa6c022e033/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1803.051728] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60567357-bff8-4de6-b2e7-40ce3b7a4d44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.067824] env[63371]: DEBUG nova.network.neutron [-] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.070416] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1803.070416] env[63371]: value = "task-1774809" [ 1803.070416] env[63371]: _type = "Task" [ 1803.070416] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.080780] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774809, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.099433] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9083ab77-c407-4f09-b766-0f75278f4544 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.109505] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913f4e36-b8db-47c4-b058-4c0f230652a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.143059] env[63371]: DEBUG nova.compute.manager [req-e1f84419-49e5-44db-9b1b-fa9f01ab0a57 req-6153084b-c5ff-43f9-8491-0f9a9a59a275 service nova] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Detach interface failed, port_id=82aece5e-dc40-4c18-a1a9-4b4e859fef2a, reason: Instance 9862b0f0-ccf6-4e69-9e78-cf864adaa65e could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1803.339566] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55b9975-ce72-4c5c-bb42-044bcd75a909 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.349687] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3048e34e-cce9-4246-bbff-e38f48fa10ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.388037] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be2f535-644e-4409-a85d-7e3ba1f292f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.395734] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883045f3-3a0e-4d1f-bc6c-c7c6841bff07 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.410650] env[63371]: DEBUG nova.compute.provider_tree [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1803.447664] env[63371]: INFO nova.compute.manager [-] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Took 1.20 seconds to deallocate network for instance. [ 1803.572091] env[63371]: INFO nova.compute.manager [-] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Took 1.47 seconds to deallocate network for instance. [ 1803.585609] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774809, 'name': ReconfigVM_Task, 'duration_secs': 0.318046} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1803.585879] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 1ec21edd-7b7c-4a2b-983f-8aa6c022e033/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1803.586998] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0fceda-0cc8-4c80-8105-8a618fd938ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.613672] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-008a4b40-0ea5-4fc0-9f9c-e143ead81fc2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1803.632955] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1803.632955] env[63371]: value = "task-1774810" [ 1803.632955] env[63371]: _type = "Task" [ 1803.632955] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.641354] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774810, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.913857] env[63371]: DEBUG nova.scheduler.client.report [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1803.954045] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.082229] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.148251] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774810, 'name': ReconfigVM_Task, 'duration_secs': 0.144325} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.148251] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1804.148251] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74ea7e16-0c36-45d4-ac3d-0db40967f1fc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.157519] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1804.157519] env[63371]: value = "task-1774811" [ 1804.157519] env[63371]: _type = "Task" [ 1804.157519] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.173975] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774811, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.418783] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.458s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.419177] env[63371]: DEBUG nova.compute.manager [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1804.422267] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.037s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.422520] env[63371]: DEBUG nova.objects.instance [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Lazy-loading 'resources' on Instance uuid e16e4a55-4198-4308-b12c-d9ac07daecad {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1804.668758] env[63371]: DEBUG oslo_vmware.api [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774811, 'name': PowerOnVM_Task, 'duration_secs': 0.422985} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.669138] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1804.671919] env[63371]: DEBUG nova.compute.manager [None req-6e8002a5-2082-419d-8f33-19f3050fdffa tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1804.672802] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e05f0163-1e52-45ad-b093-91d757abd500 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.925708] env[63371]: DEBUG nova.compute.utils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1804.927218] env[63371]: DEBUG nova.compute.manager [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1804.927395] env[63371]: DEBUG nova.network.neutron [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1804.986381] env[63371]: DEBUG nova.policy [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f85b2454eed34665b92a1ebc087353c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f98ab0107f5040139ef8be7c3ae22207', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1805.181517] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1254e6f-a587-4c28-91d8-7b195ebb7c25 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.192971] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0188d8-c7a0-4e8e-90fe-d62ad09a40e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.224558] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-60ebf4b2-7361-4bcc-b97f-245ef36a655e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.233095] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6dd8c1-616d-432e-b199-f5ef7200f660 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.248049] env[63371]: DEBUG nova.compute.provider_tree [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1805.433551] env[63371]: DEBUG nova.compute.manager [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1805.553739] env[63371]: DEBUG nova.network.neutron [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Successfully created port: 45c89cd7-4637-40af-9652-42cad1269c7e {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1805.750755] env[63371]: DEBUG nova.scheduler.client.report [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1806.259576] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.837s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.262091] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.093s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.262340] env[63371]: DEBUG nova.objects.instance [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'resources' on Instance uuid d00602b9-16bf-4c11-bc47-6076dddbf159 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1806.284613] 
env[63371]: INFO nova.scheduler.client.report [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Deleted allocations for instance e16e4a55-4198-4308-b12c-d9ac07daecad [ 1806.444475] env[63371]: DEBUG nova.compute.manager [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1806.472372] env[63371]: DEBUG nova.virt.hardware [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1806.472634] env[63371]: DEBUG nova.virt.hardware [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1806.472791] env[63371]: DEBUG nova.virt.hardware [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1806.473027] env[63371]: DEBUG nova.virt.hardware [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1806.473174] env[63371]: DEBUG nova.virt.hardware [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1806.473322] env[63371]: DEBUG nova.virt.hardware [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1806.473532] env[63371]: DEBUG nova.virt.hardware [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), 
maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1806.473690] env[63371]: DEBUG nova.virt.hardware [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1806.473857] env[63371]: DEBUG nova.virt.hardware [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1806.474058] env[63371]: DEBUG nova.virt.hardware [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1806.474252] env[63371]: DEBUG nova.virt.hardware [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1806.475204] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52504bb5-6768-4246-a7b0-099c92cb51eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.483789] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7e6a07-82aa-4649-a0ce-0729c6790ce7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.793546] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d93416a-736e-4e42-b354-e6a75780ae57 tempest-ServersAdmin275Test-858740018 tempest-ServersAdmin275Test-858740018-project-member] Lock "e16e4a55-4198-4308-b12c-d9ac07daecad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.177s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.013861] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af37bf73-53bb-424d-b111-2ae72f0f3260 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.021739] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1cbcd08-e47e-4846-b850-5ec8bf401442 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.055769] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f86847-701e-4b9f-8679-bb5ec97812e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.064817] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04cb0b5a-c59d-42e9-acc5-a56c13ca8032 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.079963] env[63371]: DEBUG nova.compute.provider_tree [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1807.133290] env[63371]: DEBUG nova.compute.manager [req-fd5dd841-114b-4a80-ba77-6a4d4975d621 req-c8643d22-3bd2-4be4-8fab-a6085488b18e service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Received event network-vif-plugged-45c89cd7-4637-40af-9652-42cad1269c7e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1807.133593] env[63371]: DEBUG oslo_concurrency.lockutils [req-fd5dd841-114b-4a80-ba77-6a4d4975d621 req-c8643d22-3bd2-4be4-8fab-a6085488b18e service nova] Acquiring lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1807.133886] env[63371]: DEBUG oslo_concurrency.lockutils [req-fd5dd841-114b-4a80-ba77-6a4d4975d621 req-c8643d22-3bd2-4be4-8fab-a6085488b18e service nova] Lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.134200] env[63371]: DEBUG oslo_concurrency.lockutils [req-fd5dd841-114b-4a80-ba77-6a4d4975d621 req-c8643d22-3bd2-4be4-8fab-a6085488b18e service nova] Lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.134464] env[63371]: DEBUG nova.compute.manager [req-fd5dd841-114b-4a80-ba77-6a4d4975d621 req-c8643d22-3bd2-4be4-8fab-a6085488b18e service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] No waiting events found dispatching network-vif-plugged-45c89cd7-4637-40af-9652-42cad1269c7e {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1807.134925] env[63371]: WARNING nova.compute.manager [req-fd5dd841-114b-4a80-ba77-6a4d4975d621 req-c8643d22-3bd2-4be4-8fab-a6085488b18e service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Received unexpected event network-vif-plugged-45c89cd7-4637-40af-9652-42cad1269c7e for instance with vm_state building and task_state spawning. 
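The entries above repeat one pattern over and over: a vSphere method (UnregisterVM, DeleteDatastoreFile_Task, CopyVirtualDisk_Task, ReconfigVM_Task, PowerOnVM_Task) is invoked, the returned task reference is handed to wait_for_task, and the task is polled ("progress is N%") until it reports "completed successfully". The sketch below is a minimal illustration of that poll loop under stated assumptions, not the oslo.vmware implementation; get_task_info and TaskFailed are hypothetical stand-ins for the real property-collector calls and fault handling.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls, comparable to the looping-call cadence in the log

    class TaskFailed(Exception):
        """Hypothetical error for a vSphere task that ends in an error state."""

    def wait_for_task(task_ref, get_task_info):
        """Poll a task reference (e.g. "task-1774808") until it finishes.

        get_task_info is a caller-supplied function (an assumption here) that
        returns an object with .state, .progress and .error attributes.
        """
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                return info                      # caller logs the duration, as in the entries above
            if info.state == "error":
                raise TaskFailed(info.error)
            # queued or running: report progress and poll again
            print(f"Task {task_ref} progress is {info.progress or 0}%")
            time.sleep(POLL_INTERVAL)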
[ 1807.243072] env[63371]: DEBUG nova.network.neutron [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Successfully updated port: 45c89cd7-4637-40af-9652-42cad1269c7e {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1807.444439] env[63371]: INFO nova.compute.manager [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Rescuing [ 1807.444738] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "refresh_cache-0518c5a8-8cc1-4829-a0cf-5f5904f6df86" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.444998] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "refresh_cache-0518c5a8-8cc1-4829-a0cf-5f5904f6df86" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.444998] env[63371]: DEBUG nova.network.neutron [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1807.584123] env[63371]: DEBUG nova.scheduler.client.report [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1807.746205] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.746398] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.746555] env[63371]: DEBUG nova.network.neutron [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 
3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1808.088788] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.827s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.091079] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.222s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.092808] env[63371]: INFO nova.compute.claims [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1808.120540] env[63371]: INFO nova.scheduler.client.report [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleted allocations for instance d00602b9-16bf-4c11-bc47-6076dddbf159 [ 1808.180090] env[63371]: DEBUG nova.network.neutron [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Updating instance_info_cache with network_info: [{"id": "e4ee0c90-4a70-4f4e-b976-34412c13da2f", "address": "fa:16:3e:2d:a3:3a", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4ee0c90-4a", "ovs_interfaceid": "e4ee0c90-4a70-4f4e-b976-34412c13da2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1808.282502] env[63371]: DEBUG nova.network.neutron [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1808.434747] env[63371]: DEBUG nova.network.neutron [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Updating instance_info_cache with network_info: [{"id": "45c89cd7-4637-40af-9652-42cad1269c7e", "address": "fa:16:3e:04:2f:b7", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45c89cd7-46", "ovs_interfaceid": "45c89cd7-4637-40af-9652-42cad1269c7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1808.631922] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2c290e9a-4f1a-480e-891a-30e27f916aa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "d00602b9-16bf-4c11-bc47-6076dddbf159" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.756s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.632933] env[63371]: DEBUG oslo_concurrency.lockutils [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] Acquired lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1808.633999] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-624f99e3-037d-4bdf-9f68-84c1c35e745b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.642739] env[63371]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
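The WARNING above and the ERROR that follows capture what happens when the driver probes vm-368377 after it has already been unregistered: vCenter answers the property read with a ManagedObjectNotFound SOAP fault, oslo.vmware surfaces it as ManagedObjectNotFoundException, and Nova ultimately reports it as InstanceNotFound for d00602b9-16bf-4c11-bc47-6076dddbf159 while processing the deleted-interface event. A minimal sketch of that translation, assuming a hypothetical session object whose helpers stand in for the vim_util calls shown in the traceback that follows:

    class ManagedObjectNotFound(Exception):
        """Stand-in for oslo_vmware.exceptions.ManagedObjectNotFoundException."""

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""
        def __init__(self, instance_uuid):
            super().__init__(f"Instance {instance_uuid} could not be found.")
            self.instance_uuid = instance_uuid

    def get_vm_power_state(session, instance_uuid):
        """Read a property of the backing VM, translating a stale-moref fault."""
        try:
            vm_ref = session.get_vm_ref(instance_uuid)      # cf. SearchIndex.FindAllByUuid above
            return session.get_object_property(vm_ref, "runtime.powerState")
        except ManagedObjectNotFound:
            # The VM was deleted between lookup and property read, so report it
            # the way the compute manager does in the traceback below.
            raise InstanceNotFound(instance_uuid)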
[ 1808.642899] env[63371]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=63371) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1808.643298] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-054b079a-9019-4eee-b71e-c6285d48ebe9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.651777] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc0aa2a-3560-4b0d-9ac4-44fdc1caed6f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.682784] env[63371]: ERROR root [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-368377' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-368377' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-368377' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-368377'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-368377' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-368377' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-368377'}\n"]: nova.exception.InstanceNotFound: Instance d00602b9-16bf-4c11-bc47-6076dddbf159 could not be found. [ 1808.682784] env[63371]: DEBUG oslo_concurrency.lockutils [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] Releasing lock "d00602b9-16bf-4c11-bc47-6076dddbf159" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1808.682784] env[63371]: DEBUG nova.compute.manager [req-9bf3cef4-1fa3-4386-a7f8-d4598be38f98 req-3c201784-d96f-4bf4-a52a-b3736e14e5c4 service nova] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Detach interface failed, port_id=57835801-cbba-4176-8f6b-8d0ec76aa66e, reason: Instance d00602b9-16bf-4c11-bc47-6076dddbf159 could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1808.683311] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "refresh_cache-0518c5a8-8cc1-4829-a0cf-5f5904f6df86" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1808.936578] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1808.936578] env[63371]: DEBUG nova.compute.manager [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Instance network_info: |[{"id": "45c89cd7-4637-40af-9652-42cad1269c7e", "address": "fa:16:3e:04:2f:b7", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45c89cd7-46", "ovs_interfaceid": "45c89cd7-4637-40af-9652-42cad1269c7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1808.937040] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:2f:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45c89cd7-4637-40af-9652-42cad1269c7e', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1808.947020] env[63371]: DEBUG oslo.service.loopingcall [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1808.947020] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1808.947191] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-026f5e38-c622-4f31-9bdb-fde024c76b09 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.968331] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1808.968331] env[63371]: value = "task-1774812" [ 1808.968331] env[63371]: _type = "Task" [ 1808.968331] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.978323] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774812, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.159975] env[63371]: DEBUG nova.compute.manager [req-bf1dd309-becd-40cc-959e-da11825889ac req-107290d2-22ee-49db-8e4c-f92113fec735 service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Received event network-changed-45c89cd7-4637-40af-9652-42cad1269c7e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1809.159975] env[63371]: DEBUG nova.compute.manager [req-bf1dd309-becd-40cc-959e-da11825889ac req-107290d2-22ee-49db-8e4c-f92113fec735 service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Refreshing instance network info cache due to event network-changed-45c89cd7-4637-40af-9652-42cad1269c7e. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1809.160290] env[63371]: DEBUG oslo_concurrency.lockutils [req-bf1dd309-becd-40cc-959e-da11825889ac req-107290d2-22ee-49db-8e4c-f92113fec735 service nova] Acquiring lock "refresh_cache-3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.160453] env[63371]: DEBUG oslo_concurrency.lockutils [req-bf1dd309-becd-40cc-959e-da11825889ac req-107290d2-22ee-49db-8e4c-f92113fec735 service nova] Acquired lock "refresh_cache-3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.160610] env[63371]: DEBUG nova.network.neutron [req-bf1dd309-becd-40cc-959e-da11825889ac req-107290d2-22ee-49db-8e4c-f92113fec735 service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Refreshing network info cache for port 45c89cd7-4637-40af-9652-42cad1269c7e {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1809.222353] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1809.222662] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44b3a18f-b8f5-48cc-a35f-cfe66120b3cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.235424] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1809.235424] env[63371]: value = "task-1774813" [ 1809.235424] env[63371]: _type = "Task" [ 1809.235424] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.248324] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774813, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.403603] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb566d03-1090-4405-b7c1-aac42cad9af4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.411746] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da698d69-30f8-4464-a51e-c082679d4c97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.447928] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab2e335-553d-4fdb-a03e-f3c193a3eb74 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.456566] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2ed854-e54a-4e14-ae29-7a4877033f95 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.473346] env[63371]: DEBUG nova.compute.provider_tree [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1809.484267] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774812, 'name': CreateVM_Task, 'duration_secs': 0.422932} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.484501] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1809.485203] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.485755] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.485755] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1809.485997] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b781d8a-7de9-479c-865d-8a7eb23d49b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1809.491716] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1809.491716] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c66db1-7a68-0557-c367-2372935da637" [ 1809.491716] env[63371]: _type = "Task" [ 1809.491716] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.500627] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c66db1-7a68-0557-c367-2372935da637, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.747469] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774813, 'name': PowerOffVM_Task, 'duration_secs': 0.317635} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.747469] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1809.748378] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d3a300-31d1-4acd-b6a2-dbae5bae1917 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.772201] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af210fb-f3c3-4a62-a2b7-948820b2a992 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.800642] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1809.800930] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30776212-d39b-4336-b4ca-2eb73c3482a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.808114] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1809.808114] env[63371]: value = "task-1774814" [ 1809.808114] env[63371]: _type = "Task" [ 1809.808114] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.817075] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1809.817304] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1809.817802] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.817979] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.819202] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1809.819202] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d284ef26-6e22-4b7f-a1a7-891f6c39aec6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.827650] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1809.827831] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1809.830845] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0136a8d2-04d2-4597-930f-d9797d8ff144 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.837108] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1809.837108] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52865dea-ac07-c865-ed59-17e0d51c4a79" [ 1809.837108] env[63371]: _type = "Task" [ 1809.837108] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.847226] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52865dea-ac07-c865-ed59-17e0d51c4a79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.979615] env[63371]: DEBUG nova.scheduler.client.report [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1809.985218] env[63371]: DEBUG nova.network.neutron [req-bf1dd309-becd-40cc-959e-da11825889ac req-107290d2-22ee-49db-8e4c-f92113fec735 service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Updated VIF entry in instance network info cache for port 45c89cd7-4637-40af-9652-42cad1269c7e. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1809.985606] env[63371]: DEBUG nova.network.neutron [req-bf1dd309-becd-40cc-959e-da11825889ac req-107290d2-22ee-49db-8e4c-f92113fec735 service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Updating instance_info_cache with network_info: [{"id": "45c89cd7-4637-40af-9652-42cad1269c7e", "address": "fa:16:3e:04:2f:b7", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45c89cd7-46", "ovs_interfaceid": "45c89cd7-4637-40af-9652-42cad1269c7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.003804] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c66db1-7a68-0557-c367-2372935da637, 'name': SearchDatastore_Task, 'duration_secs': 0.012934} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.004141] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.004557] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1810.004775] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1810.350154] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52865dea-ac07-c865-ed59-17e0d51c4a79, 'name': SearchDatastore_Task, 'duration_secs': 0.008916} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.350154] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50af9fa7-0c83-4305-acd8-de81d6a0e319 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.354972] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1810.354972] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ceeb7e-3d52-df3d-f9ef-347c69e735d3" [ 1810.354972] env[63371]: _type = "Task" [ 1810.354972] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.362777] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ceeb7e-3d52-df3d-f9ef-347c69e735d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.488990] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1810.489604] env[63371]: DEBUG nova.compute.manager [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1810.492349] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.409s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.493387] env[63371]: DEBUG nova.objects.instance [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lazy-loading 'resources' on Instance uuid 9985dbcd-4498-4629-aae5-5e1933307c50 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1810.494531] env[63371]: DEBUG oslo_concurrency.lockutils [req-bf1dd309-becd-40cc-959e-da11825889ac req-107290d2-22ee-49db-8e4c-f92113fec735 service nova] Releasing lock "refresh_cache-3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.661754] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1810.662010] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1810.866073] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ceeb7e-3d52-df3d-f9ef-347c69e735d3, 'name': SearchDatastore_Task, 'duration_secs': 0.011045} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.866406] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.866629] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0518c5a8-8cc1-4829-a0cf-5f5904f6df86/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. {{(pid=63371) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1810.866917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.867218] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1810.867886] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfbe052b-cfaa-486b-830b-cb34c1f67056 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.869773] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06280958-4d7c-4cb2-a880-e815cfe74b02 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.876146] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1810.876146] env[63371]: value = "task-1774815" [ 1810.876146] env[63371]: _type = "Task" [ 1810.876146] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.880522] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1810.880702] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1810.881780] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f712bd23-cfa8-43d0-b206-e8ad7b5bdc5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.887961] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774815, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.889184] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1810.889184] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ef7d02-20fd-5403-f14c-db63ae15ccaf" [ 1810.889184] env[63371]: _type = "Task" [ 1810.889184] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.896881] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ef7d02-20fd-5403-f14c-db63ae15ccaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.998996] env[63371]: DEBUG nova.compute.utils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1811.001130] env[63371]: DEBUG nova.compute.manager [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1811.001307] env[63371]: DEBUG nova.network.neutron [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1811.053458] env[63371]: DEBUG nova.policy [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31b76ca90f31495287b332ebb3001dff', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e96348bcfea1455dad72945c7c36f027', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1811.164405] env[63371]: DEBUG nova.compute.manager [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1811.336726] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8763f0a1-6408-4b52-8196-9f41caa15397 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.346369] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f37cc5-a1ce-4d73-b02c-deafc794dc50 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.393350] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00ef59c-99e1-4d7a-96a7-6c7540e76dfc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.404024] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.404024] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.414600] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] 
Task: {'id': task-1774815, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.421269] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ef7d02-20fd-5403-f14c-db63ae15ccaf, 'name': SearchDatastore_Task, 'duration_secs': 0.009634} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.424192] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0be09e8c-42bf-45ba-b992-d88fc1bcb288 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.429542] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-153ca014-594e-4447-8d4b-6273237e9ed9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.437234] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1811.437234] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5250d2b8-7a19-d27b-fd0a-0b615053f093" [ 1811.437234] env[63371]: _type = "Task" [ 1811.437234] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.445831] env[63371]: DEBUG nova.compute.provider_tree [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1811.451562] env[63371]: DEBUG nova.network.neutron [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Successfully created port: f76030ee-9cbe-4574-a686-4feffec912d6 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1811.461137] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5250d2b8-7a19-d27b-fd0a-0b615053f093, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.504253] env[63371]: DEBUG nova.compute.manager [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1811.685011] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.896962] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774815, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59443} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.897318] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 0518c5a8-8cc1-4829-a0cf-5f5904f6df86/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. [ 1811.899049] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f6528c-ca8c-4282-a795-692f7016ff9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.915687] env[63371]: DEBUG nova.compute.manager [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1811.925500] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 0518c5a8-8cc1-4829-a0cf-5f5904f6df86/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1811.926017] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a193787a-d79a-4f81-a0a7-ecf0c8e85a08 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.945466] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1811.945466] env[63371]: value = "task-1774816" [ 1811.945466] env[63371]: _type = "Task" [ 1811.945466] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.954819] env[63371]: DEBUG nova.scheduler.client.report [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1811.962103] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5250d2b8-7a19-d27b-fd0a-0b615053f093, 'name': SearchDatastore_Task, 'duration_secs': 0.023028} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.962567] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1811.962823] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd/3e2f17e7-8c9c-47c0-afb1-55e56eab74fd.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1811.963155] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f36c82a3-1ca1-47f1-b192-83f02f12dcbb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.968969] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774816, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.973905] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1811.973905] env[63371]: value = "task-1774817" [ 1811.973905] env[63371]: _type = "Task" [ 1811.973905] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.983046] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774817, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.446616] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1812.460587] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774816, 'name': ReconfigVM_Task, 'duration_secs': 0.502734} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.460874] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 0518c5a8-8cc1-4829-a0cf-5f5904f6df86/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1812.461763] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b929caed-3d4e-4bad-a791-f005eae666c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.464882] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.973s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.466958] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.086s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.467170] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.468744] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 
tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.875s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1812.468953] env[63371]: DEBUG nova.objects.instance [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Lazy-loading 'resources' on Instance uuid 6c2edb87-7a36-4814-ac4a-199cdca1ef68 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1812.494520] env[63371]: INFO nova.scheduler.client.report [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted allocations for instance 3a6c12a7-732f-4a73-a8c5-6810b554cc03 [ 1812.496041] env[63371]: INFO nova.scheduler.client.report [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Deleted allocations for instance 9985dbcd-4498-4629-aae5-5e1933307c50 [ 1812.499621] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19a887df-bd86-4805-a0a8-422bc1deef8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.515708] env[63371]: DEBUG nova.compute.manager [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1812.525920] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774817, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520587} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.526052] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd/3e2f17e7-8c9c-47c0-afb1-55e56eab74fd.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1812.526798] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1812.526798] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1812.526798] env[63371]: value = "task-1774818" [ 1812.526798] env[63371]: _type = "Task" [ 1812.526798] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.526798] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e967ff20-c5ad-4b60-855f-f6186bb90a87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.536709] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1812.536709] env[63371]: value = "task-1774819" [ 1812.536709] env[63371]: _type = "Task" [ 1812.536709] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.544269] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774818, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.547653] env[63371]: DEBUG nova.virt.hardware [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1812.547857] env[63371]: DEBUG nova.virt.hardware [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1812.547988] env[63371]: DEBUG nova.virt.hardware [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1812.548181] env[63371]: DEBUG nova.virt.hardware [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1812.548349] env[63371]: DEBUG nova.virt.hardware [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1812.548520] env[63371]: DEBUG nova.virt.hardware [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1812.549527] env[63371]: DEBUG nova.virt.hardware [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1812.549527] env[63371]: DEBUG nova.virt.hardware [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1812.549527] 
env[63371]: DEBUG nova.virt.hardware [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1812.549527] env[63371]: DEBUG nova.virt.hardware [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1812.549527] env[63371]: DEBUG nova.virt.hardware [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1812.550575] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21dcbb77-3e8b-40b8-8b10-728e24c27f0a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.556988] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774819, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.562175] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10f6c15-8b4c-4f99-b791-19161697c74e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.001638] env[63371]: DEBUG nova.compute.manager [req-04a0601b-66fe-46f4-813d-a556a7cc3a30 req-31a33caa-e137-46b5-a1e0-ba604f893612 service nova] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Received event network-vif-plugged-f76030ee-9cbe-4574-a686-4feffec912d6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1813.001902] env[63371]: DEBUG oslo_concurrency.lockutils [req-04a0601b-66fe-46f4-813d-a556a7cc3a30 req-31a33caa-e137-46b5-a1e0-ba604f893612 service nova] Acquiring lock "d042bb16-c84d-42bb-af3f-38c08995fd91-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.002861] env[63371]: DEBUG oslo_concurrency.lockutils [req-04a0601b-66fe-46f4-813d-a556a7cc3a30 req-31a33caa-e137-46b5-a1e0-ba604f893612 service nova] Lock "d042bb16-c84d-42bb-af3f-38c08995fd91-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1813.002861] env[63371]: DEBUG oslo_concurrency.lockutils [req-04a0601b-66fe-46f4-813d-a556a7cc3a30 req-31a33caa-e137-46b5-a1e0-ba604f893612 service nova] Lock "d042bb16-c84d-42bb-af3f-38c08995fd91-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.002861] env[63371]: DEBUG 
nova.compute.manager [req-04a0601b-66fe-46f4-813d-a556a7cc3a30 req-31a33caa-e137-46b5-a1e0-ba604f893612 service nova] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] No waiting events found dispatching network-vif-plugged-f76030ee-9cbe-4574-a686-4feffec912d6 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1813.002861] env[63371]: WARNING nova.compute.manager [req-04a0601b-66fe-46f4-813d-a556a7cc3a30 req-31a33caa-e137-46b5-a1e0-ba604f893612 service nova] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Received unexpected event network-vif-plugged-f76030ee-9cbe-4574-a686-4feffec912d6 for instance with vm_state building and task_state spawning. [ 1813.024507] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2d304f1e-629a-4b1c-881f-8eae60c9f7cf tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "3a6c12a7-732f-4a73-a8c5-6810b554cc03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.637s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.026950] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3eecda70-fa08-4d58-9be5-d1869c33faf0 tempest-ServersNegativeTestJSON-1665297044 tempest-ServersNegativeTestJSON-1665297044-project-member] Lock "9985dbcd-4498-4629-aae5-5e1933307c50" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.731s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1813.042036] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774818, 'name': ReconfigVM_Task, 'duration_secs': 0.382115} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.046649] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1813.051610] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-43903ea1-4366-4752-8480-0db80e422480 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.062122] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774819, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.16022} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.063756] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1813.064177] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1813.064177] env[63371]: value = "task-1774820" [ 1813.064177] env[63371]: _type = "Task" [ 1813.064177] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.065026] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a31f374b-2cfb-4317-9e81-51223010c6dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.080055] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774820, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.101060] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd/3e2f17e7-8c9c-47c0-afb1-55e56eab74fd.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1813.104315] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68c4b0eb-8394-431b-bd58-9e5f78bb8613 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.126511] env[63371]: DEBUG nova.network.neutron [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Successfully updated port: f76030ee-9cbe-4574-a686-4feffec912d6 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1813.129619] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1813.129619] env[63371]: value = "task-1774821" [ 1813.129619] env[63371]: _type = "Task" [ 1813.129619] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.143024] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774821, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.294964] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d413b979-699f-42e0-93d4-7d2de1259067 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.302737] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5287c6e7-8095-4427-8ea3-98e1345055fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.334606] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7618130c-86f1-4e7f-ad16-dee98cf9b2af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.342799] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85db815-6b8e-49ed-89f2-879f0c2c60fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.357116] env[63371]: DEBUG nova.compute.provider_tree [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1813.578531] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774820, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.631056] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "refresh_cache-d042bb16-c84d-42bb-af3f-38c08995fd91" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1813.631212] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "refresh_cache-d042bb16-c84d-42bb-af3f-38c08995fd91" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1813.631404] env[63371]: DEBUG nova.network.neutron [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1813.644725] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774821, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.860721] env[63371]: DEBUG nova.scheduler.client.report [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1814.078089] env[63371]: DEBUG oslo_vmware.api [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774820, 'name': PowerOnVM_Task, 'duration_secs': 0.966484} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.078483] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1814.081323] env[63371]: DEBUG nova.compute.manager [None req-a8330782-5781-440a-8d95-0ba44284a74e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1814.082202] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bb03a5-c0e4-4d86-9c1b-bd54aba86247 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.143888] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774821, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.182775] env[63371]: DEBUG nova.network.neutron [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1814.338515] env[63371]: DEBUG nova.network.neutron [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Updating instance_info_cache with network_info: [{"id": "f76030ee-9cbe-4574-a686-4feffec912d6", "address": "fa:16:3e:c3:91:f2", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf76030ee-9c", "ovs_interfaceid": "f76030ee-9cbe-4574-a686-4feffec912d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1814.366054] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.897s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.368267] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.094s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.370288] env[63371]: INFO nova.compute.claims [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1814.404129] env[63371]: INFO nova.scheduler.client.report [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Deleted allocations for instance 6c2edb87-7a36-4814-ac4a-199cdca1ef68 [ 1814.644606] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774821, 'name': ReconfigVM_Task, 'duration_secs': 1.064075} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.644888] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd/3e2f17e7-8c9c-47c0-afb1-55e56eab74fd.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1814.645601] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3cc0d48a-ae66-41d2-a96e-83149884b400 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.652194] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1814.652194] env[63371]: value = "task-1774822" [ 1814.652194] env[63371]: _type = "Task" [ 1814.652194] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.660775] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774822, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.841278] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "refresh_cache-d042bb16-c84d-42bb-af3f-38c08995fd91" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1814.841637] env[63371]: DEBUG nova.compute.manager [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Instance network_info: |[{"id": "f76030ee-9cbe-4574-a686-4feffec912d6", "address": "fa:16:3e:c3:91:f2", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf76030ee-9c", "ovs_interfaceid": "f76030ee-9cbe-4574-a686-4feffec912d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1814.842203] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:91:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '39a4aca0-934b-4a91-8779-6a4360c3f967', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f76030ee-9cbe-4574-a686-4feffec912d6', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1814.850791] env[63371]: DEBUG oslo.service.loopingcall [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1814.851084] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1814.851330] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e16cbca0-09bd-4571-a793-9c01b7dd9094 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.875365] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1814.875365] env[63371]: value = "task-1774823" [ 1814.875365] env[63371]: _type = "Task" [ 1814.875365] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.886630] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774823, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.914428] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e289f2f6-7c83-427a-9b72-d2a8292477b9 tempest-InstanceActionsNegativeTestJSON-2085009009 tempest-InstanceActionsNegativeTestJSON-2085009009-project-member] Lock "6c2edb87-7a36-4814-ac4a-199cdca1ef68" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.320s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.074087] env[63371]: DEBUG nova.compute.manager [req-f020453e-bea4-405a-96dc-379ce19058be req-3f754412-f4b7-42bb-a74a-1d419eb7d654 service nova] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Received event network-changed-f76030ee-9cbe-4574-a686-4feffec912d6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1815.074087] env[63371]: DEBUG nova.compute.manager [req-f020453e-bea4-405a-96dc-379ce19058be req-3f754412-f4b7-42bb-a74a-1d419eb7d654 service nova] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Refreshing instance network info cache due to event network-changed-f76030ee-9cbe-4574-a686-4feffec912d6. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1815.076219] env[63371]: DEBUG oslo_concurrency.lockutils [req-f020453e-bea4-405a-96dc-379ce19058be req-3f754412-f4b7-42bb-a74a-1d419eb7d654 service nova] Acquiring lock "refresh_cache-d042bb16-c84d-42bb-af3f-38c08995fd91" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.076380] env[63371]: DEBUG oslo_concurrency.lockutils [req-f020453e-bea4-405a-96dc-379ce19058be req-3f754412-f4b7-42bb-a74a-1d419eb7d654 service nova] Acquired lock "refresh_cache-d042bb16-c84d-42bb-af3f-38c08995fd91" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.076601] env[63371]: DEBUG nova.network.neutron [req-f020453e-bea4-405a-96dc-379ce19058be req-3f754412-f4b7-42bb-a74a-1d419eb7d654 service nova] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Refreshing network info cache for port f76030ee-9cbe-4574-a686-4feffec912d6 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1815.161773] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774822, 'name': Rename_Task, 'duration_secs': 0.145293} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.164221] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1815.164524] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9bc22223-2ad9-4715-a944-fdeab17fd902 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.172066] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1815.172066] env[63371]: value = "task-1774824" [ 1815.172066] env[63371]: _type = "Task" [ 1815.172066] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.182088] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774824, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.390034] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774823, 'name': CreateVM_Task, 'duration_secs': 0.417696} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.390192] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1815.390859] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.391032] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.391362] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1815.391622] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77ed73d5-29e6-49fd-9acb-e446bb9ab96b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.399106] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1815.399106] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fd1620-3bc5-ae36-c3d4-a4a783a7bc86" [ 1815.399106] env[63371]: _type = "Task" [ 1815.399106] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.410232] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fd1620-3bc5-ae36-c3d4-a4a783a7bc86, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.617381] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc63d5c8-c8d4-4259-949f-856343f656e3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.626242] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1fa1f9-5aa0-4a40-803f-5511b341555f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.662569] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d716fafb-efe8-4b8d-b5cc-baca402baead {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.670391] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f8e9a4-5935-4d59-997a-682ffe065536 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.693529] env[63371]: DEBUG nova.compute.provider_tree [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1815.698832] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774824, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.767338] env[63371]: INFO nova.compute.manager [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Unrescuing [ 1815.767627] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "refresh_cache-0518c5a8-8cc1-4829-a0cf-5f5904f6df86" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.767784] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "refresh_cache-0518c5a8-8cc1-4829-a0cf-5f5904f6df86" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.767960] env[63371]: DEBUG nova.network.neutron [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1815.911675] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52fd1620-3bc5-ae36-c3d4-a4a783a7bc86, 'name': SearchDatastore_Task, 'duration_secs': 0.010535} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.911995] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1815.912248] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1815.912477] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.912621] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.912793] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1815.913076] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-997e7a79-47a2-492b-8db0-e9c92dac4d45 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.923828] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1815.923989] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1815.924713] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-416917e5-d377-47b0-bfcf-2b6f6c3075d5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.929722] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1815.929722] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9551e-808c-d84f-c134-d056e7c817cd" [ 1815.929722] env[63371]: _type = "Task" [ 1815.929722] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.937262] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9551e-808c-d84f-c134-d056e7c817cd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.956281] env[63371]: DEBUG nova.network.neutron [req-f020453e-bea4-405a-96dc-379ce19058be req-3f754412-f4b7-42bb-a74a-1d419eb7d654 service nova] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Updated VIF entry in instance network info cache for port f76030ee-9cbe-4574-a686-4feffec912d6. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1815.956637] env[63371]: DEBUG nova.network.neutron [req-f020453e-bea4-405a-96dc-379ce19058be req-3f754412-f4b7-42bb-a74a-1d419eb7d654 service nova] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Updating instance_info_cache with network_info: [{"id": "f76030ee-9cbe-4574-a686-4feffec912d6", "address": "fa:16:3e:c3:91:f2", "network": {"id": "2413ab2b-68e9-4e73-808e-9bef6bf7b969", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1133319485-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e96348bcfea1455dad72945c7c36f027", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "39a4aca0-934b-4a91-8779-6a4360c3f967", "external-id": "nsx-vlan-transportzone-454", "segmentation_id": 454, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf76030ee-9c", "ovs_interfaceid": "f76030ee-9cbe-4574-a686-4feffec912d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1816.186318] env[63371]: DEBUG oslo_vmware.api [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774824, 'name': PowerOnVM_Task, 'duration_secs': 0.560768} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.186610] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1816.186803] env[63371]: INFO nova.compute.manager [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Took 9.74 seconds to spawn the instance on the hypervisor. [ 1816.186976] env[63371]: DEBUG nova.compute.manager [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1816.187776] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b7f893-925d-4f60-9ae1-5728f5c2120d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.202027] env[63371]: DEBUG nova.scheduler.client.report [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1816.441637] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9551e-808c-d84f-c134-d056e7c817cd, 'name': SearchDatastore_Task, 'duration_secs': 0.008801} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.443691] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2870360-180c-4643-aa47-f06e8d616273 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.450289] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1816.450289] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]522ccc58-596a-54c9-6dab-0e0c7a59b5c6" [ 1816.450289] env[63371]: _type = "Task" [ 1816.450289] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.463123] env[63371]: DEBUG oslo_concurrency.lockutils [req-f020453e-bea4-405a-96dc-379ce19058be req-3f754412-f4b7-42bb-a74a-1d419eb7d654 service nova] Releasing lock "refresh_cache-d042bb16-c84d-42bb-af3f-38c08995fd91" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1816.463123] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522ccc58-596a-54c9-6dab-0e0c7a59b5c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1816.703891] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.336s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.704452] env[63371]: DEBUG nova.compute.manager [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1816.711057] env[63371]: INFO nova.compute.manager [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Took 38.15 seconds to build instance. 
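Illustrative sketch, not part of the log: the repeated Lock "compute_resources" acquired by ... / "released" by ... :: waited/held records above are emitted by oslo.concurrency's lock helpers, which Nova's resource tracker wraps around its resource-claim critical section. The function name claim_resources and the _usage dict below are hypothetical placeholders used only to show the pattern.

    from oslo_concurrency import lockutils

    _usage = {}

    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid, vcpus, memory_mb):
        # Runs with the named lock held; the "waited N s" / "held N s"
        # values in the log are measured around this critical section
        # by the decorator's inner wrapper.
        _usage[instance_uuid] = {'vcpus': vcpus, 'memory_mb': memory_mb}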
[ 1816.716194] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.265s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.717900] env[63371]: INFO nova.compute.claims [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1816.890506] env[63371]: DEBUG nova.network.neutron [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Updating instance_info_cache with network_info: [{"id": "e4ee0c90-4a70-4f4e-b976-34412c13da2f", "address": "fa:16:3e:2d:a3:3a", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape4ee0c90-4a", "ovs_interfaceid": "e4ee0c90-4a70-4f4e-b976-34412c13da2f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1816.962105] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]522ccc58-596a-54c9-6dab-0e0c7a59b5c6, 'name': SearchDatastore_Task, 'duration_secs': 0.009858} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1816.962401] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1816.962661] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] d042bb16-c84d-42bb-af3f-38c08995fd91/d042bb16-c84d-42bb-af3f-38c08995fd91.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1816.962936] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49170483-64e4-42cb-8440-74ab5c64ff5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.970868] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1816.970868] env[63371]: value = "task-1774825" [ 1816.970868] env[63371]: _type = "Task" [ 1816.970868] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1816.982583] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774825, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.211891] env[63371]: DEBUG nova.compute.utils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1817.218466] env[63371]: DEBUG nova.compute.manager [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1817.218466] env[63371]: DEBUG nova.network.neutron [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1817.226187] env[63371]: DEBUG oslo_concurrency.lockutils [None req-26fd0d77-035e-40ff-a0d9-49e7c2465590 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.669s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.298405] env[63371]: DEBUG nova.policy [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c6416719728485f8dd45eea9e39fdc5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58f967d3770541269fb89f48b3df58c9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1817.304504] env[63371]: DEBUG nova.compute.manager [req-cd2eca4d-dc63-4965-931b-a36697d49ad8 req-9a785bd7-a5e8-4187-a9c2-249056d6d4ff service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Received event network-changed-45c89cd7-4637-40af-9652-42cad1269c7e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1817.304793] env[63371]: DEBUG nova.compute.manager [req-cd2eca4d-dc63-4965-931b-a36697d49ad8 req-9a785bd7-a5e8-4187-a9c2-249056d6d4ff service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Refreshing instance network info cache due to event network-changed-45c89cd7-4637-40af-9652-42cad1269c7e. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1817.305069] env[63371]: DEBUG oslo_concurrency.lockutils [req-cd2eca4d-dc63-4965-931b-a36697d49ad8 req-9a785bd7-a5e8-4187-a9c2-249056d6d4ff service nova] Acquiring lock "refresh_cache-3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.305288] env[63371]: DEBUG oslo_concurrency.lockutils [req-cd2eca4d-dc63-4965-931b-a36697d49ad8 req-9a785bd7-a5e8-4187-a9c2-249056d6d4ff service nova] Acquired lock "refresh_cache-3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.305496] env[63371]: DEBUG nova.network.neutron [req-cd2eca4d-dc63-4965-931b-a36697d49ad8 req-9a785bd7-a5e8-4187-a9c2-249056d6d4ff service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Refreshing network info cache for port 45c89cd7-4637-40af-9652-42cad1269c7e {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1817.396310] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "refresh_cache-0518c5a8-8cc1-4829-a0cf-5f5904f6df86" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1817.396977] env[63371]: DEBUG nova.objects.instance [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lazy-loading 'flavor' on Instance uuid 0518c5a8-8cc1-4829-a0cf-5f5904f6df86 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1817.463122] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "da4839fa-8597-411c-b30c-0ac9226fec1f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.463380] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.482800] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774825, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478566} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1817.482933] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] d042bb16-c84d-42bb-af3f-38c08995fd91/d042bb16-c84d-42bb-af3f-38c08995fd91.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1817.483183] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1817.483429] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c7c853c7-8feb-4cda-b8a0-8440ea77e418 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.490342] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1817.490342] env[63371]: value = "task-1774826" [ 1817.490342] env[63371]: _type = "Task" [ 1817.490342] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.499409] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774826, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.619469] env[63371]: DEBUG nova.network.neutron [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Successfully created port: e09a5b9d-78bf-4d1c-98f8-434a0c37c88d {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1817.714508] env[63371]: DEBUG nova.compute.manager [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1817.911322] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ffdba1-e577-4932-9f95-c46399696c9e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.941138] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1817.944818] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c7294f8-7bee-469b-9edc-56448a8f8767 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.957926] env[63371]: DEBUG oslo_vmware.api [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1817.957926] env[63371]: value = "task-1774827" [ 1817.957926] env[63371]: _type = "Task" [ 1817.957926] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.969736] env[63371]: DEBUG nova.compute.utils [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1817.971197] env[63371]: DEBUG oslo_vmware.api [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774827, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.000603] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774826, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061432} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.003089] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1818.006676] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397ed266-421a-4ce3-961b-3be14785fba8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.030447] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] d042bb16-c84d-42bb-af3f-38c08995fd91/d042bb16-c84d-42bb-af3f-38c08995fd91.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1818.033295] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e9e1b85-56fb-4582-bd02-6ece8fa507fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.057105] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1818.057105] env[63371]: value = "task-1774828" [ 1818.057105] env[63371]: _type = "Task" [ 1818.057105] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.067609] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774828, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.090028] env[63371]: DEBUG nova.network.neutron [req-cd2eca4d-dc63-4965-931b-a36697d49ad8 req-9a785bd7-a5e8-4187-a9c2-249056d6d4ff service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Updated VIF entry in instance network info cache for port 45c89cd7-4637-40af-9652-42cad1269c7e. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1818.090028] env[63371]: DEBUG nova.network.neutron [req-cd2eca4d-dc63-4965-931b-a36697d49ad8 req-9a785bd7-a5e8-4187-a9c2-249056d6d4ff service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Updating instance_info_cache with network_info: [{"id": "45c89cd7-4637-40af-9652-42cad1269c7e", "address": "fa:16:3e:04:2f:b7", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.203", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap45c89cd7-46", "ovs_interfaceid": "45c89cd7-4637-40af-9652-42cad1269c7e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.103696] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a552eb22-aa59-438b-80ba-401978a8f352 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.111903] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7293f54-15f8-41fd-92f0-760bd77ff9b9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.144996] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84bfd72f-b861-4611-bdfa-74e646e934af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.152688] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c72d13b-b56a-400b-a4c9-2f02cc15f92b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.166380] env[63371]: DEBUG nova.compute.provider_tree [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1818.467885] env[63371]: DEBUG oslo_vmware.api [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774827, 'name': PowerOffVM_Task, 'duration_secs': 0.230288} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.468191] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1818.473840] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1818.474247] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-119dafc4-a9db-411c-b70f-c265c0eecf78 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.487455] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.024s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1818.492875] env[63371]: DEBUG oslo_vmware.api [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1818.492875] env[63371]: value = "task-1774829" [ 1818.492875] env[63371]: _type = "Task" [ 1818.492875] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.501928] env[63371]: DEBUG oslo_vmware.api [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774829, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.569300] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774828, 'name': ReconfigVM_Task, 'duration_secs': 0.437016} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.569697] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Reconfigured VM instance instance-00000069 to attach disk [datastore1] d042bb16-c84d-42bb-af3f-38c08995fd91/d042bb16-c84d-42bb-af3f-38c08995fd91.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1818.570317] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07fe52d0-d01a-4a5d-8caa-c84edb328015 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.577345] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1818.577345] env[63371]: value = "task-1774830" [ 1818.577345] env[63371]: _type = "Task" [ 1818.577345] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.586690] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774830, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.592446] env[63371]: DEBUG oslo_concurrency.lockutils [req-cd2eca4d-dc63-4965-931b-a36697d49ad8 req-9a785bd7-a5e8-4187-a9c2-249056d6d4ff service nova] Releasing lock "refresh_cache-3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.669870] env[63371]: DEBUG nova.scheduler.client.report [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1818.731135] env[63371]: DEBUG nova.compute.manager [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1818.760769] env[63371]: DEBUG nova.virt.hardware [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1818.761044] env[63371]: DEBUG nova.virt.hardware [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1818.761248] env[63371]: DEBUG nova.virt.hardware [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1818.761450] env[63371]: DEBUG nova.virt.hardware [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1818.761607] env[63371]: DEBUG nova.virt.hardware [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1818.761756] env[63371]: DEBUG nova.virt.hardware [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1818.762388] env[63371]: DEBUG nova.virt.hardware [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1818.762388] env[63371]: DEBUG nova.virt.hardware [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1818.762388] env[63371]: DEBUG nova.virt.hardware [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 
tempest-ServersTestJSON-1162814863-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1818.762604] env[63371]: DEBUG nova.virt.hardware [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1818.763090] env[63371]: DEBUG nova.virt.hardware [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1818.764038] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67fc6a25-bb72-4f28-883f-4f42045e9699 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.771780] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11af16ab-bcd1-4f0e-89f5-9d007d7a92ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.004090] env[63371]: DEBUG oslo_vmware.api [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774829, 'name': ReconfigVM_Task, 'duration_secs': 0.267244} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.004453] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1819.004940] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1819.005089] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-163c178a-4fe8-4c01-9257-2360428f407d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.012793] env[63371]: DEBUG oslo_vmware.api [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1819.012793] env[63371]: value = "task-1774831" [ 1819.012793] env[63371]: _type = "Task" [ 1819.012793] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.022388] env[63371]: DEBUG oslo_vmware.api [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774831, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.095411] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774830, 'name': Rename_Task, 'duration_secs': 0.320041} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.095411] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1819.095411] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe632497-f397-4fb6-aef7-390baffb1eae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.100751] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1819.100751] env[63371]: value = "task-1774832" [ 1819.100751] env[63371]: _type = "Task" [ 1819.100751] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.110627] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774832, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.179245] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.464s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.179939] env[63371]: DEBUG nova.compute.manager [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1819.183111] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.401s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.184649] env[63371]: INFO nova.compute.claims [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1819.207975] env[63371]: DEBUG nova.compute.manager [req-7871daef-50a4-42ec-b690-155c47189813 req-7022f775-bfd7-4d6a-b23e-94913a41eb8f service nova] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Received event network-vif-plugged-e09a5b9d-78bf-4d1c-98f8-434a0c37c88d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1819.207975] env[63371]: DEBUG oslo_concurrency.lockutils [req-7871daef-50a4-42ec-b690-155c47189813 req-7022f775-bfd7-4d6a-b23e-94913a41eb8f service nova] Acquiring lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.207975] env[63371]: DEBUG oslo_concurrency.lockutils [req-7871daef-50a4-42ec-b690-155c47189813 req-7022f775-bfd7-4d6a-b23e-94913a41eb8f service nova] Lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.207975] env[63371]: DEBUG oslo_concurrency.lockutils [req-7871daef-50a4-42ec-b690-155c47189813 req-7022f775-bfd7-4d6a-b23e-94913a41eb8f service nova] Lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.207975] env[63371]: DEBUG nova.compute.manager [req-7871daef-50a4-42ec-b690-155c47189813 req-7022f775-bfd7-4d6a-b23e-94913a41eb8f service nova] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] No waiting events found dispatching network-vif-plugged-e09a5b9d-78bf-4d1c-98f8-434a0c37c88d {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1819.207975] env[63371]: WARNING nova.compute.manager [req-7871daef-50a4-42ec-b690-155c47189813 req-7022f775-bfd7-4d6a-b23e-94913a41eb8f service nova] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Received unexpected event network-vif-plugged-e09a5b9d-78bf-4d1c-98f8-434a0c37c88d for instance with vm_state building and task_state spawning. 
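Aside: the recurring 'Lock "..." acquired by ... :: waited Ns' / '"released" by ... :: held Ns' pairs in the entries above are emitted by the `inner` wrapper in oslo.concurrency's lockutils (the lockutils.py:402/407/421 references). A minimal, self-contained sketch of that primitive is below; the function name, lock name usage, and instance UUID here are illustrative only and are not Nova's actual resource-tracker code. It assumes the `oslo.concurrency` package is installed.

```python
from oslo_concurrency import lockutils

# Hypothetical example: serialize access to a shared resource the same way the
# log shows "compute_resources" being serialized. The decorator acquires a named
# semaphore around the call and emits the DEBUG "acquired by ... waited" /
# '"released" by ... held' lines seen in the log.
@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs only while the named lock is held.
    print(f"claiming resources for {instance_uuid}")

claim_resources('da2e3b05-9cb0-49bb-8945-924e48cf3431')
```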
[ 1819.332211] env[63371]: DEBUG nova.network.neutron [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Successfully updated port: e09a5b9d-78bf-4d1c-98f8-434a0c37c88d {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1819.522532] env[63371]: DEBUG oslo_vmware.api [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774831, 'name': PowerOnVM_Task, 'duration_secs': 0.488389} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.522880] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1819.523098] env[63371]: DEBUG nova.compute.manager [None req-93b6c8bb-d181-4b2b-ae91-443c4055fda0 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1819.523905] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8ce24e-4cb7-4819-ab05-6ff19fb42bb0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.539112] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "da4839fa-8597-411c-b30c-0ac9226fec1f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.539498] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.539849] env[63371]: INFO nova.compute.manager [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Attaching volume 09edb13c-bd3b-408a-9cb8-685550cd6225 to /dev/sdb [ 1819.572638] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d59bb3-2e07-4fa2-ab4e-b0d2d1344c29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.580939] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee1d149-8760-478e-bb43-93173b662afa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1819.594525] env[63371]: DEBUG nova.virt.block_device [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Updating existing volume attachment record: 9f26168d-8f97-4350-bad5-27e6180b0cad {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1819.609684] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774832, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.693172] env[63371]: DEBUG nova.compute.utils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1819.696377] env[63371]: DEBUG nova.compute.manager [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1819.696551] env[63371]: DEBUG nova.network.neutron [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1819.738388] env[63371]: DEBUG nova.policy [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5e4bf6cfe9124f3a9ea2df44c43611f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f3df339d9a704d9b9bebecac3871584c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1819.837123] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "refresh_cache-407d1ef8-c5df-4277-b503-0d09cdaf8ef1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.837123] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "refresh_cache-407d1ef8-c5df-4277-b503-0d09cdaf8ef1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.837123] env[63371]: DEBUG nova.network.neutron [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Building network info cache for instance 
{{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1820.061811] env[63371]: DEBUG nova.network.neutron [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Successfully created port: a22c781d-8374-4914-8e01-d61b8df475a7 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1820.114787] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774832, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.204922] env[63371]: DEBUG nova.compute.manager [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1820.375011] env[63371]: DEBUG nova.network.neutron [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1820.473910] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a74e4e-11b1-46d1-8cd0-b06676c8d05c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.482015] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d4fdfa-3a9b-4d4e-863c-dee40a34cd6e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.516058] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53d26eb0-3e08-4165-b7dc-f0dfa2a78116 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.524348] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4da9557-2ea5-4987-8718-8cf4b38b420b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.538217] env[63371]: DEBUG nova.compute.provider_tree [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1820.555490] env[63371]: DEBUG nova.network.neutron [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Updating instance_info_cache with network_info: [{"id": "e09a5b9d-78bf-4d1c-98f8-434a0c37c88d", "address": "fa:16:3e:e4:56:2c", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape09a5b9d-78", "ovs_interfaceid": "e09a5b9d-78bf-4d1c-98f8-434a0c37c88d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1820.613818] env[63371]: DEBUG oslo_vmware.api [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774832, 'name': PowerOnVM_Task, 'duration_secs': 1.087589} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.614133] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1820.614345] env[63371]: INFO nova.compute.manager [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Took 8.10 seconds to spawn the instance on the hypervisor. 
[ 1820.615143] env[63371]: DEBUG nova.compute.manager [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1820.615303] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288af370-0239-4af1-84ea-da187e94b345 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.041699] env[63371]: DEBUG nova.scheduler.client.report [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1821.058071] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "refresh_cache-407d1ef8-c5df-4277-b503-0d09cdaf8ef1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.058633] env[63371]: DEBUG nova.compute.manager [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Instance network_info: |[{"id": "e09a5b9d-78bf-4d1c-98f8-434a0c37c88d", "address": "fa:16:3e:e4:56:2c", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape09a5b9d-78", "ovs_interfaceid": "e09a5b9d-78bf-4d1c-98f8-434a0c37c88d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1821.058803] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:e4:56:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e09a5b9d-78bf-4d1c-98f8-434a0c37c88d', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1821.067523] env[63371]: DEBUG oslo.service.loopingcall [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1821.068014] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1821.068247] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1849660f-46fb-499e-bb41-f3ba67298c53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.089267] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1821.089267] env[63371]: value = "task-1774834" [ 1821.089267] env[63371]: _type = "Task" [ 1821.089267] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.097567] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774834, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.130938] env[63371]: INFO nova.compute.manager [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Took 35.28 seconds to build instance. [ 1821.217105] env[63371]: DEBUG nova.compute.manager [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1821.228702] env[63371]: DEBUG nova.compute.manager [req-9e3b60f7-9e0b-4b4c-acc1-1262546d77d4 req-c5ef9613-cfe2-418f-bc8b-06c470745e2c service nova] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Received event network-changed-e09a5b9d-78bf-4d1c-98f8-434a0c37c88d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1821.228892] env[63371]: DEBUG nova.compute.manager [req-9e3b60f7-9e0b-4b4c-acc1-1262546d77d4 req-c5ef9613-cfe2-418f-bc8b-06c470745e2c service nova] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Refreshing instance network info cache due to event network-changed-e09a5b9d-78bf-4d1c-98f8-434a0c37c88d. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1821.229203] env[63371]: DEBUG oslo_concurrency.lockutils [req-9e3b60f7-9e0b-4b4c-acc1-1262546d77d4 req-c5ef9613-cfe2-418f-bc8b-06c470745e2c service nova] Acquiring lock "refresh_cache-407d1ef8-c5df-4277-b503-0d09cdaf8ef1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.229388] env[63371]: DEBUG oslo_concurrency.lockutils [req-9e3b60f7-9e0b-4b4c-acc1-1262546d77d4 req-c5ef9613-cfe2-418f-bc8b-06c470745e2c service nova] Acquired lock "refresh_cache-407d1ef8-c5df-4277-b503-0d09cdaf8ef1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.229610] env[63371]: DEBUG nova.network.neutron [req-9e3b60f7-9e0b-4b4c-acc1-1262546d77d4 req-c5ef9613-cfe2-418f-bc8b-06c470745e2c service nova] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Refreshing network info cache for port e09a5b9d-78bf-4d1c-98f8-434a0c37c88d {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1821.244459] env[63371]: DEBUG nova.virt.hardware [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1821.244664] env[63371]: DEBUG nova.virt.hardware [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1821.244821] env[63371]: DEBUG nova.virt.hardware [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1821.245154] env[63371]: DEBUG nova.virt.hardware [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1821.245154] env[63371]: DEBUG nova.virt.hardware [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1821.245290] env[63371]: DEBUG nova.virt.hardware [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 
tempest-AttachVolumeTestJSON-810026873-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1821.245488] env[63371]: DEBUG nova.virt.hardware [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1821.245647] env[63371]: DEBUG nova.virt.hardware [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1821.245811] env[63371]: DEBUG nova.virt.hardware [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1821.245970] env[63371]: DEBUG nova.virt.hardware [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1821.246176] env[63371]: DEBUG nova.virt.hardware [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1821.247408] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a57d7d8-515c-4f32-af5f-5623bd954182 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.255363] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf0f96c-ec6c-4ac3-97ed-ed519c93b6c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.546336] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.546868] env[63371]: DEBUG nova.compute.manager [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1821.550138] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.596s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1821.550403] env[63371]: DEBUG nova.objects.instance [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lazy-loading 'resources' on Instance uuid 11527051-7a4f-481a-b5ed-14550c550c4e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1821.607245] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774834, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.632600] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d919bfa9-c670-4c32-8386-de1d3defd858 tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "d042bb16-c84d-42bb-af3f-38c08995fd91" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.793s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1821.749987] env[63371]: DEBUG nova.network.neutron [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Successfully updated port: a22c781d-8374-4914-8e01-d61b8df475a7 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1822.053405] env[63371]: DEBUG nova.compute.utils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1822.058359] env[63371]: DEBUG nova.compute.manager [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1822.058526] env[63371]: DEBUG nova.network.neutron [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1822.107169] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774834, 'name': CreateVM_Task, 'duration_secs': 0.699139} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.107169] env[63371]: DEBUG nova.network.neutron [req-9e3b60f7-9e0b-4b4c-acc1-1262546d77d4 req-c5ef9613-cfe2-418f-bc8b-06c470745e2c service nova] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Updated VIF entry in instance network info cache for port e09a5b9d-78bf-4d1c-98f8-434a0c37c88d. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1822.107494] env[63371]: DEBUG nova.network.neutron [req-9e3b60f7-9e0b-4b4c-acc1-1262546d77d4 req-c5ef9613-cfe2-418f-bc8b-06c470745e2c service nova] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Updating instance_info_cache with network_info: [{"id": "e09a5b9d-78bf-4d1c-98f8-434a0c37c88d", "address": "fa:16:3e:e4:56:2c", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape09a5b9d-78", "ovs_interfaceid": "e09a5b9d-78bf-4d1c-98f8-434a0c37c88d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.108876] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1822.112600] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.112771] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.113128] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1822.114053] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1ba57907-c677-46a3-a3a3-94fe0a22d892 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.119483] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1822.119483] env[63371]: value = 
"session[52854284-8312-6a88-0b15-8c5a2a120aab]52b94deb-8cd9-d7f1-3535-7403c97de154" [ 1822.119483] env[63371]: _type = "Task" [ 1822.119483] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.128211] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b94deb-8cd9-d7f1-3535-7403c97de154, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.136987] env[63371]: DEBUG nova.policy [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '25f28e53648c41d1a147c1aa04f0a708', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9fb0da840f6847f19f03a1db8a1c3f4f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1822.173803] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "d042bb16-c84d-42bb-af3f-38c08995fd91" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.174281] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "d042bb16-c84d-42bb-af3f-38c08995fd91" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.174571] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "d042bb16-c84d-42bb-af3f-38c08995fd91-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1822.174777] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "d042bb16-c84d-42bb-af3f-38c08995fd91-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1822.174947] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "d042bb16-c84d-42bb-af3f-38c08995fd91-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1822.177281] env[63371]: INFO nova.compute.manager [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Terminating instance [ 1822.179453] env[63371]: DEBUG nova.compute.manager [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1822.179661] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1822.180502] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00c7ec04-f501-4671-8205-5188847bd07a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.191781] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1822.192102] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4c7ad68-1078-4491-9f73-c3484588e7b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.202625] env[63371]: DEBUG oslo_vmware.api [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1822.202625] env[63371]: value = "task-1774836" [ 1822.202625] env[63371]: _type = "Task" [ 1822.202625] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.211039] env[63371]: DEBUG oslo_vmware.api [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774836, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.252030] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "refresh_cache-da2e3b05-9cb0-49bb-8945-924e48cf3431" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.252030] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired lock "refresh_cache-da2e3b05-9cb0-49bb-8945-924e48cf3431" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.252030] env[63371]: DEBUG nova.network.neutron [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1822.351305] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18d4926-69f9-4472-947b-10bfc3aec120 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.358160] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8e8c66-0ee1-4fb4-be96-5ae1dcf04e9e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.393879] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00bf9bfa-daca-46ab-9f89-8bcc4c3e5272 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.402603] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9dd4903-6b5b-4109-a130-dde7ff4f0876 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.422698] env[63371]: DEBUG nova.compute.provider_tree [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1822.444232] env[63371]: DEBUG nova.network.neutron [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Successfully created port: fdb2262d-54b0-4555-939f-39915c982e09 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1822.559381] env[63371]: DEBUG nova.compute.manager [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1822.614915] env[63371]: DEBUG oslo_concurrency.lockutils [req-9e3b60f7-9e0b-4b4c-acc1-1262546d77d4 req-c5ef9613-cfe2-418f-bc8b-06c470745e2c service nova] Releasing lock "refresh_cache-407d1ef8-c5df-4277-b503-0d09cdaf8ef1" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.630303] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b94deb-8cd9-d7f1-3535-7403c97de154, 'name': SearchDatastore_Task, 'duration_secs': 0.010753} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.630603] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.630835] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1822.631110] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.631265] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.631438] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1822.631989] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-361f9fef-95c3-4596-ad4b-d605446bdace {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.639806] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1822.639973] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 
tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1822.640663] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddece7ac-bcf3-4bfd-af14-b581d6a00cc0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.645596] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1822.645596] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a28cc8-795f-0cb5-3053-d849465d18b9" [ 1822.645596] env[63371]: _type = "Task" [ 1822.645596] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.652762] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a28cc8-795f-0cb5-3053-d849465d18b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.711520] env[63371]: DEBUG oslo_vmware.api [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774836, 'name': PowerOffVM_Task, 'duration_secs': 0.293719} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.711783] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1822.711949] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1822.712234] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-917a2e3a-0955-4598-ad7b-660446c1c4ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.795182] env[63371]: DEBUG nova.network.neutron [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1822.817438] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1822.817655] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1822.817858] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleting the datastore file [datastore1] d042bb16-c84d-42bb-af3f-38c08995fd91 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1822.818158] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc2189af-622d-4605-b693-daae5f58b0bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.826331] env[63371]: DEBUG oslo_vmware.api [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for the task: (returnval){ [ 1822.826331] env[63371]: value = "task-1774838" [ 1822.826331] env[63371]: _type = "Task" [ 1822.826331] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.834758] env[63371]: DEBUG oslo_vmware.api [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774838, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.927978] env[63371]: DEBUG nova.scheduler.client.report [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1822.971807] env[63371]: DEBUG nova.network.neutron [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Updating instance_info_cache with network_info: [{"id": "a22c781d-8374-4914-8e01-d61b8df475a7", "address": "fa:16:3e:d9:60:17", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa22c781d-83", "ovs_interfaceid": "a22c781d-8374-4914-8e01-d61b8df475a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.155743] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a28cc8-795f-0cb5-3053-d849465d18b9, 'name': SearchDatastore_Task, 'duration_secs': 0.008785} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.156601] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4f94b78-7115-4969-8809-ef2aac5ae1b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.162129] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1823.162129] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d5e55b-0692-6610-ea81-c2f29f9e1ae5" [ 1823.162129] env[63371]: _type = "Task" [ 1823.162129] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.170281] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d5e55b-0692-6610-ea81-c2f29f9e1ae5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.336472] env[63371]: DEBUG oslo_vmware.api [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Task: {'id': task-1774838, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132394} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.336849] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1823.337072] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1823.337261] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1823.337435] env[63371]: INFO nova.compute.manager [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1823.337681] env[63371]: DEBUG oslo.service.loopingcall [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1823.337872] env[63371]: DEBUG nova.compute.manager [-] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1823.339269] env[63371]: DEBUG nova.network.neutron [-] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1823.363299] env[63371]: DEBUG nova.compute.manager [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Received event network-vif-plugged-a22c781d-8374-4914-8e01-d61b8df475a7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1823.363516] env[63371]: DEBUG oslo_concurrency.lockutils [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] Acquiring lock "da2e3b05-9cb0-49bb-8945-924e48cf3431-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.363722] env[63371]: DEBUG oslo_concurrency.lockutils [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.363888] env[63371]: DEBUG oslo_concurrency.lockutils [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.366062] env[63371]: DEBUG nova.compute.manager [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] No waiting events found dispatching network-vif-plugged-a22c781d-8374-4914-8e01-d61b8df475a7 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1823.366294] env[63371]: WARNING nova.compute.manager [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Received unexpected event network-vif-plugged-a22c781d-8374-4914-8e01-d61b8df475a7 for instance with vm_state building and task_state spawning. [ 1823.366466] env[63371]: DEBUG nova.compute.manager [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Received event network-changed-a22c781d-8374-4914-8e01-d61b8df475a7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1823.366622] env[63371]: DEBUG nova.compute.manager [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Refreshing instance network info cache due to event network-changed-a22c781d-8374-4914-8e01-d61b8df475a7. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1823.366790] env[63371]: DEBUG oslo_concurrency.lockutils [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] Acquiring lock "refresh_cache-da2e3b05-9cb0-49bb-8945-924e48cf3431" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1823.433320] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.883s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.435714] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.354s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.436686] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.441019] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.753s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.441019] env[63371]: INFO nova.compute.claims [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1823.452612] env[63371]: INFO nova.scheduler.client.report [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Deleted allocations for instance 11527051-7a4f-481a-b5ed-14550c550c4e [ 1823.464224] env[63371]: INFO nova.scheduler.client.report [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleted allocations for instance 9862b0f0-ccf6-4e69-9e78-cf864adaa65e [ 1823.475100] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Releasing lock "refresh_cache-da2e3b05-9cb0-49bb-8945-924e48cf3431" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.475100] env[63371]: DEBUG nova.compute.manager [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 
tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Instance network_info: |[{"id": "a22c781d-8374-4914-8e01-d61b8df475a7", "address": "fa:16:3e:d9:60:17", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa22c781d-83", "ovs_interfaceid": "a22c781d-8374-4914-8e01-d61b8df475a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1823.475656] env[63371]: DEBUG oslo_concurrency.lockutils [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] Acquired lock "refresh_cache-da2e3b05-9cb0-49bb-8945-924e48cf3431" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1823.475656] env[63371]: DEBUG nova.network.neutron [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Refreshing network info cache for port a22c781d-8374-4914-8e01-d61b8df475a7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1823.477210] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:60:17', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d6abf71-e893-4dec-9a05-0fe7d6c0624e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a22c781d-8374-4914-8e01-d61b8df475a7', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1823.488361] env[63371]: DEBUG oslo.service.loopingcall [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1823.492383] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1823.492383] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f2538d7-039f-4fc7-8c64-bcf8cd06d514 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.514806] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1823.514806] env[63371]: value = "task-1774839" [ 1823.514806] env[63371]: _type = "Task" [ 1823.514806] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.524650] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774839, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.568141] env[63371]: DEBUG nova.compute.manager [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1823.600569] env[63371]: DEBUG nova.virt.hardware [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1823.600986] env[63371]: DEBUG nova.virt.hardware [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1823.601350] env[63371]: DEBUG nova.virt.hardware [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1823.601574] env[63371]: DEBUG nova.virt.hardware [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1823.601834] env[63371]: DEBUG nova.virt.hardware [None 
req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1823.602140] env[63371]: DEBUG nova.virt.hardware [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1823.602532] env[63371]: DEBUG nova.virt.hardware [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1823.602808] env[63371]: DEBUG nova.virt.hardware [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1823.603159] env[63371]: DEBUG nova.virt.hardware [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1823.603497] env[63371]: DEBUG nova.virt.hardware [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1823.603843] env[63371]: DEBUG nova.virt.hardware [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1823.605704] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1257aa-20cc-47ae-bbba-864bcf3ef237 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.621616] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976b19ce-1285-41cd-a0fe-8137b207da10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.631934] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.632216] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 
tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.673909] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d5e55b-0692-6610-ea81-c2f29f9e1ae5, 'name': SearchDatastore_Task, 'duration_secs': 0.010021} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.674227] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.674484] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 407d1ef8-c5df-4277-b503-0d09cdaf8ef1/407d1ef8-c5df-4277-b503-0d09cdaf8ef1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1823.674740] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-913bbfa2-e80d-43ef-a370-360358879960 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.681123] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1823.681123] env[63371]: value = "task-1774840" [ 1823.681123] env[63371]: _type = "Task" [ 1823.681123] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.691584] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774840, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.723348] env[63371]: DEBUG nova.compute.manager [req-16bce0bb-f7c3-4c1c-a214-fb22d5aaa03f req-d22d8e0c-323f-4855-8e06-9bd0ea473767 service nova] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Received event network-vif-deleted-f76030ee-9cbe-4574-a686-4feffec912d6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1823.723555] env[63371]: INFO nova.compute.manager [req-16bce0bb-f7c3-4c1c-a214-fb22d5aaa03f req-d22d8e0c-323f-4855-8e06-9bd0ea473767 service nova] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Neutron deleted interface f76030ee-9cbe-4574-a686-4feffec912d6; detaching it from the instance and deleting it from the info cache [ 1823.723724] env[63371]: DEBUG nova.network.neutron [req-16bce0bb-f7c3-4c1c-a214-fb22d5aaa03f req-d22d8e0c-323f-4855-8e06-9bd0ea473767 service nova] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.758568] env[63371]: DEBUG nova.network.neutron [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Updated VIF entry in instance network info cache for port a22c781d-8374-4914-8e01-d61b8df475a7. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1823.758941] env[63371]: DEBUG nova.network.neutron [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Updating instance_info_cache with network_info: [{"id": "a22c781d-8374-4914-8e01-d61b8df475a7", "address": "fa:16:3e:d9:60:17", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa22c781d-83", "ovs_interfaceid": "a22c781d-8374-4914-8e01-d61b8df475a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.960451] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16ec268c-fbe8-4e0b-918f-523d0e4870d5 tempest-ListServerFiltersTestJSON-1447803434 tempest-ListServerFiltersTestJSON-1447803434-project-member] Lock "11527051-7a4f-481a-b5ed-14550c550c4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.920s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.975950] env[63371]: DEBUG 
oslo_concurrency.lockutils [None req-6145617b-3940-4531-a793-9b95c2f40970 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "9862b0f0-ccf6-4e69-9e78-cf864adaa65e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.999s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.025461] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774839, 'name': CreateVM_Task, 'duration_secs': 0.367354} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.025663] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1824.026380] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.026548] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.026865] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1824.027152] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a6be660-943a-4dda-b867-44bdb7d22fcd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.032128] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1824.032128] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525261de-2ba1-98c7-8d06-b151429309eb" [ 1824.032128] env[63371]: _type = "Task" [ 1824.032128] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.041022] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525261de-2ba1-98c7-8d06-b151429309eb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.123772] env[63371]: DEBUG nova.network.neutron [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Successfully updated port: fdb2262d-54b0-4555-939f-39915c982e09 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1824.135807] env[63371]: DEBUG nova.compute.manager [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1824.150504] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Volume attach. Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1824.150742] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368481', 'volume_id': '09edb13c-bd3b-408a-9cb8-685550cd6225', 'name': 'volume-09edb13c-bd3b-408a-9cb8-685550cd6225', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'da4839fa-8597-411c-b30c-0ac9226fec1f', 'attached_at': '', 'detached_at': '', 'volume_id': '09edb13c-bd3b-408a-9cb8-685550cd6225', 'serial': '09edb13c-bd3b-408a-9cb8-685550cd6225'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1824.151647] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-381dc867-83f7-4f34-8e10-afb6d72950f2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.169173] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea24eea-f020-4fe7-809c-faf04e119246 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.195987] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] volume-09edb13c-bd3b-408a-9cb8-685550cd6225/volume-09edb13c-bd3b-408a-9cb8-685550cd6225.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1824.199423] env[63371]: DEBUG nova.network.neutron [-] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1824.200545] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-5c8fa6c3-1f45-4c17-a991-5e2d93b9a112 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.213542] env[63371]: INFO nova.compute.manager [-] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Took 0.88 seconds to deallocate network for instance. [ 1824.221771] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774840, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490948} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.222927] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 407d1ef8-c5df-4277-b503-0d09cdaf8ef1/407d1ef8-c5df-4277-b503-0d09cdaf8ef1.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1824.223191] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1824.223703] env[63371]: DEBUG oslo_vmware.api [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1824.223703] env[63371]: value = "task-1774841" [ 1824.223703] env[63371]: _type = "Task" [ 1824.223703] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.223883] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-99012de7-7beb-4a46-b0b7-995047add009 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.228826] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4d835cfc-ac5c-4d53-b517-af4c17acbd73 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.235949] env[63371]: DEBUG oslo_vmware.api [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774841, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.237873] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1824.237873] env[63371]: value = "task-1774842" [ 1824.237873] env[63371]: _type = "Task" [ 1824.237873] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.240738] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52c6de8-db53-490c-8e6d-9efd86c3445c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.262351] env[63371]: DEBUG oslo_concurrency.lockutils [req-574fc57c-a8a6-4d16-ab9e-096641f1f312 req-ba18b881-b5ef-4cf1-8c20-ace6c10c1408 service nova] Releasing lock "refresh_cache-da2e3b05-9cb0-49bb-8945-924e48cf3431" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.262937] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774842, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.276948] env[63371]: DEBUG nova.compute.manager [req-16bce0bb-f7c3-4c1c-a214-fb22d5aaa03f req-d22d8e0c-323f-4855-8e06-9bd0ea473767 service nova] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Detach interface failed, port_id=f76030ee-9cbe-4574-a686-4feffec912d6, reason: Instance d042bb16-c84d-42bb-af3f-38c08995fd91 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1824.544470] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525261de-2ba1-98c7-8d06-b151429309eb, 'name': SearchDatastore_Task, 'duration_secs': 0.055329} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.547352] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1824.547616] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1824.547843] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.547983] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.548209] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1824.548661] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d276f01-ff34-4a45-9586-07afb60a1f82 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.557224] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1824.557424] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1824.558308] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27737d38-250d-4446-ba65-d057940baf4d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.566828] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1824.566828] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521ba81c-825e-dbb7-0168-72e318aaf798" [ 1824.566828] env[63371]: _type = "Task" [ 1824.566828] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.576879] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521ba81c-825e-dbb7-0168-72e318aaf798, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.627572] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1824.627741] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1824.627843] env[63371]: DEBUG nova.network.neutron [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1824.658216] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.672836] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d603512b-374e-4680-a5b3-93aa89e05856 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.681839] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5742fb73-7c2a-4b8d-8e7c-bf2a4c8dc1ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.714875] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-eb6acdf4-b916-4e45-a2b4-998a944ad5f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.722833] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86587f8-3183-40c7-85f2-c93f7a9c3ef7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.727908] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1824.745813] env[63371]: DEBUG nova.compute.provider_tree [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1824.752651] env[63371]: DEBUG oslo_vmware.api [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774841, 'name': ReconfigVM_Task, 'duration_secs': 0.410201} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.753220] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Reconfigured VM instance instance-00000062 to attach disk [datastore1] volume-09edb13c-bd3b-408a-9cb8-685550cd6225/volume-09edb13c-bd3b-408a-9cb8-685550cd6225.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1824.758782] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76656244-68b3-43ec-bb13-c4d1df0fc44f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.771773] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774842, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065359} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.772380] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1824.773189] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba6a77ed-2006-4403-8df7-322c06ef8450 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.777319] env[63371]: DEBUG oslo_vmware.api [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1824.777319] env[63371]: value = "task-1774843" [ 1824.777319] env[63371]: _type = "Task" [ 1824.777319] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.799068] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 407d1ef8-c5df-4277-b503-0d09cdaf8ef1/407d1ef8-c5df-4277-b503-0d09cdaf8ef1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1824.800444] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7147bb8-c4ce-4798-bf87-a22c74058387 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.820396] env[63371]: DEBUG oslo_vmware.api [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774843, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.826643] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1824.826643] env[63371]: value = "task-1774844" [ 1824.826643] env[63371]: _type = "Task" [ 1824.826643] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.837698] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774844, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.077332] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521ba81c-825e-dbb7-0168-72e318aaf798, 'name': SearchDatastore_Task, 'duration_secs': 0.009721} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.078145] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ac6e155-e61f-4de7-89fd-4278f93d6c9d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.083468] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1825.083468] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d096fc-31a8-31b8-e1ed-e0bb7886dd2d" [ 1825.083468] env[63371]: _type = "Task" [ 1825.083468] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.091642] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d096fc-31a8-31b8-e1ed-e0bb7886dd2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.193424] env[63371]: DEBUG nova.network.neutron [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1825.257029] env[63371]: DEBUG nova.scheduler.client.report [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1825.290051] env[63371]: DEBUG oslo_vmware.api [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774843, 'name': ReconfigVM_Task, 'duration_secs': 0.217735} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.290753] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368481', 'volume_id': '09edb13c-bd3b-408a-9cb8-685550cd6225', 'name': 'volume-09edb13c-bd3b-408a-9cb8-685550cd6225', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'da4839fa-8597-411c-b30c-0ac9226fec1f', 'attached_at': '', 'detached_at': '', 'volume_id': '09edb13c-bd3b-408a-9cb8-685550cd6225', 'serial': '09edb13c-bd3b-408a-9cb8-685550cd6225'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1825.336433] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774844, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.361159] env[63371]: DEBUG nova.network.neutron [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance_info_cache with network_info: [{"id": "fdb2262d-54b0-4555-939f-39915c982e09", "address": "fa:16:3e:0d:42:9d", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdb2262d-54", "ovs_interfaceid": "fdb2262d-54b0-4555-939f-39915c982e09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1825.595098] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d096fc-31a8-31b8-e1ed-e0bb7886dd2d, 'name': SearchDatastore_Task, 'duration_secs': 0.010499} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.595835] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.596328] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] da2e3b05-9cb0-49bb-8945-924e48cf3431/da2e3b05-9cb0-49bb-8945-924e48cf3431.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1825.596723] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7906d901-9db9-4577-a5f2-0a7fab7c841a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.604272] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1825.604272] env[63371]: value = "task-1774845" [ 1825.604272] env[63371]: _type = "Task" [ 1825.604272] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.613478] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774845, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.617739] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.618200] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.705211] env[63371]: DEBUG nova.compute.manager [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Received event network-vif-plugged-fdb2262d-54b0-4555-939f-39915c982e09 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1825.705551] env[63371]: DEBUG oslo_concurrency.lockutils [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] Acquiring lock "1689fc63-3c07-4517-bbef-0011d860e9fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.705871] env[63371]: DEBUG oslo_concurrency.lockutils [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.706190] env[63371]: DEBUG oslo_concurrency.lockutils [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.706473] env[63371]: DEBUG nova.compute.manager [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] No waiting events found dispatching network-vif-plugged-fdb2262d-54b0-4555-939f-39915c982e09 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1825.706828] env[63371]: WARNING nova.compute.manager [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Received unexpected event network-vif-plugged-fdb2262d-54b0-4555-939f-39915c982e09 for instance with vm_state building and task_state spawning. 
[ 1825.707097] env[63371]: DEBUG nova.compute.manager [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Received event network-changed-fdb2262d-54b0-4555-939f-39915c982e09 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1825.707354] env[63371]: DEBUG nova.compute.manager [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Refreshing instance network info cache due to event network-changed-fdb2262d-54b0-4555-939f-39915c982e09. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1825.707619] env[63371]: DEBUG oslo_concurrency.lockutils [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] Acquiring lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1825.764581] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.326s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.764927] env[63371]: DEBUG nova.compute.manager [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1825.767990] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.321s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.769428] env[63371]: INFO nova.compute.claims [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1825.838153] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774844, 'name': ReconfigVM_Task, 'duration_secs': 0.657264} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.840356] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 407d1ef8-c5df-4277-b503-0d09cdaf8ef1/407d1ef8-c5df-4277-b503-0d09cdaf8ef1.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1825.841576] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b541822a-995d-4c13-b2d4-9b4f81bec3ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.850536] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1825.850536] env[63371]: value = "task-1774846" [ 1825.850536] env[63371]: _type = "Task" [ 1825.850536] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.860245] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774846, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.864620] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1825.864620] env[63371]: DEBUG nova.compute.manager [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Instance network_info: |[{"id": "fdb2262d-54b0-4555-939f-39915c982e09", "address": "fa:16:3e:0d:42:9d", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdb2262d-54", "ovs_interfaceid": "fdb2262d-54b0-4555-939f-39915c982e09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1997}} [ 1825.865079] env[63371]: DEBUG oslo_concurrency.lockutils [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] Acquired lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1825.865451] env[63371]: DEBUG nova.network.neutron [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Refreshing network info cache for port fdb2262d-54b0-4555-939f-39915c982e09 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1825.867046] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0d:42:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7e0240aa-a694-48fc-a0f9-6f2d3e71aa12', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fdb2262d-54b0-4555-939f-39915c982e09', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1825.875642] env[63371]: DEBUG oslo.service.loopingcall [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1825.877031] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1825.877325] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e363a66-4baa-4da0-b364-d3f1ae32470a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.898352] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1825.898352] env[63371]: value = "task-1774847" [ 1825.898352] env[63371]: _type = "Task" [ 1825.898352] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.910350] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774847, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.117445] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774845, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.121085] env[63371]: DEBUG nova.compute.manager [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1826.131663] env[63371]: DEBUG nova.objects.instance [None req-526eaaac-70d6-4918-8db5-f01ae931e74a tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Lazy-loading 'flavor' on Instance uuid 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1826.277741] env[63371]: DEBUG nova.compute.utils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1826.279290] env[63371]: DEBUG nova.compute.manager [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1826.279475] env[63371]: DEBUG nova.network.neutron [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1826.317316] env[63371]: DEBUG nova.policy [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aacd81490704110b6cc6aba338883a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a5897667b6b47deb7ff5b64f9499f36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1826.342523] env[63371]: DEBUG nova.objects.instance [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lazy-loading 'flavor' on Instance uuid da4839fa-8597-411c-b30c-0ac9226fec1f {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1826.360660] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774846, 'name': Rename_Task, 'duration_secs': 0.180144} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.360925] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1826.361187] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-daa05b7a-2bd0-41dc-8da6-d8e7d70575d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.368283] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1826.368283] env[63371]: value = "task-1774848" [ 1826.368283] env[63371]: _type = "Task" [ 1826.368283] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.384161] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774848, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.407756] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774847, 'name': CreateVM_Task, 'duration_secs': 0.473442} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.407961] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1826.408668] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.408837] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.409270] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1826.409546] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2115aaf-07ff-4fba-8cce-c262572e6a86 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.413971] env[63371]: DEBUG oslo_vmware.api [None 
req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1826.413971] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528d2442-aace-01bd-9372-a900db393ea3" [ 1826.413971] env[63371]: _type = "Task" [ 1826.413971] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.423640] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528d2442-aace-01bd-9372-a900db393ea3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.617457] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774845, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.651576} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.617722] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] da2e3b05-9cb0-49bb-8945-924e48cf3431/da2e3b05-9cb0-49bb-8945-924e48cf3431.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1826.617873] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1826.618826] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9812ccd-0206-4163-861f-d88cba3d9aca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.628100] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1826.628100] env[63371]: value = "task-1774849" [ 1826.628100] env[63371]: _type = "Task" [ 1826.628100] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.636438] env[63371]: DEBUG oslo_concurrency.lockutils [None req-526eaaac-70d6-4918-8db5-f01ae931e74a tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquiring lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.636628] env[63371]: DEBUG oslo_concurrency.lockutils [None req-526eaaac-70d6-4918-8db5-f01ae931e74a tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquired lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.641487] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774849, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.652193] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.746990] env[63371]: DEBUG nova.network.neutron [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updated VIF entry in instance network info cache for port fdb2262d-54b0-4555-939f-39915c982e09. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1826.747396] env[63371]: DEBUG nova.network.neutron [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance_info_cache with network_info: [{"id": "fdb2262d-54b0-4555-939f-39915c982e09", "address": "fa:16:3e:0d:42:9d", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdb2262d-54", "ovs_interfaceid": "fdb2262d-54b0-4555-939f-39915c982e09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1826.786020] env[63371]: DEBUG nova.compute.manager [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1826.787815] env[63371]: DEBUG nova.network.neutron [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Successfully created port: 39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1826.847364] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0fff26a8-3075-4a36-ad10-b6b9626fbc07 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.308s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.880147] env[63371]: DEBUG oslo_vmware.api [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774848, 'name': PowerOnVM_Task, 'duration_secs': 0.446946} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.881119] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1826.881119] env[63371]: INFO nova.compute.manager [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Took 8.15 seconds to spawn the instance on the hypervisor. [ 1826.881119] env[63371]: DEBUG nova.compute.manager [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1826.881927] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef204014-3737-4d95-b992-51de4ad543c8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.928371] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528d2442-aace-01bd-9372-a900db393ea3, 'name': SearchDatastore_Task, 'duration_secs': 0.008946} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.929173] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.929173] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1826.929281] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.929375] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.929511] env[63371]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1826.929909] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3134e524-d36d-4754-b5be-6a749add0524 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.945028] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1826.945028] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1826.945028] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56b620b9-fd0e-49ba-8c8f-f66d1adfaaad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.951970] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1826.951970] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528eae27-a420-6ccd-0b14-ed4b46423f83" [ 1826.951970] env[63371]: _type = "Task" [ 1826.951970] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.969817] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528eae27-a420-6ccd-0b14-ed4b46423f83, 'name': SearchDatastore_Task, 'duration_secs': 0.00977} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.970746] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b51b2622-7638-478b-816c-6f23af20f34b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.977766] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1826.977766] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528f7571-eec2-254d-499e-f86b9bd6def3" [ 1826.977766] env[63371]: _type = "Task" [ 1826.977766] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1826.992021] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528f7571-eec2-254d-499e-f86b9bd6def3, 'name': SearchDatastore_Task, 'duration_secs': 0.00941} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.992273] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1826.992525] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1689fc63-3c07-4517-bbef-0011d860e9fc/1689fc63-3c07-4517-bbef-0011d860e9fc.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1826.993250] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-252b6ded-d50e-497f-8251-51c84df77a46 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.002079] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1827.002079] env[63371]: value = "task-1774850" [ 1827.002079] env[63371]: _type = "Task" [ 1827.002079] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.010947] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774850, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.078161] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c95278a-a629-42ef-94f9-b95a613be38d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.086855] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0063d93a-2491-4dbb-be1f-85be4c3a535e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.120586] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473362e8-7e8e-42f6-8e2b-7b31d33cd894 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.128064] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e3144a4-80e4-436e-b1ac-591d391be75d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.145174] env[63371]: DEBUG nova.compute.provider_tree [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1827.150043] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774849, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.108966} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.150043] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1827.150732] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1887c0ee-ad8b-43fc-b1bd-b96f4829e031 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.173798] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] da2e3b05-9cb0-49bb-8945-924e48cf3431/da2e3b05-9cb0-49bb-8945-924e48cf3431.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1827.175123] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8189829e-e591-4149-a6e8-ca781ebd5ed9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.197378] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1827.197378] env[63371]: value = "task-1774851" [ 1827.197378] env[63371]: _type = "Task" [ 1827.197378] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.208666] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774851, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.213481] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "da4839fa-8597-411c-b30c-0ac9226fec1f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1827.213752] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1827.250319] env[63371]: DEBUG oslo_concurrency.lockutils [req-4a92289f-48fb-4e92-a527-981dfa63247f req-9f097584-4eb4-4baa-9aa4-8cf3ca648f2d service nova] Releasing lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.375085] env[63371]: DEBUG nova.network.neutron [None req-526eaaac-70d6-4918-8db5-f01ae931e74a tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1827.401911] env[63371]: DEBUG nova.compute.manager [req-9eddc791-94d9-4340-be21-599a459e3b77 req-4a22914b-24e5-4073-b886-da0162e769da service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Received event network-changed-dd28ee6f-5efa-4009-842b-c1c9af10f8ea {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1827.402200] env[63371]: DEBUG nova.compute.manager [req-9eddc791-94d9-4340-be21-599a459e3b77 req-4a22914b-24e5-4073-b886-da0162e769da service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Refreshing instance network info cache due to event network-changed-dd28ee6f-5efa-4009-842b-c1c9af10f8ea. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1827.402789] env[63371]: DEBUG oslo_concurrency.lockutils [req-9eddc791-94d9-4340-be21-599a459e3b77 req-4a22914b-24e5-4073-b886-da0162e769da service nova] Acquiring lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1827.405442] env[63371]: INFO nova.compute.manager [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Took 32.15 seconds to build instance. [ 1827.509536] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774850, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474168} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.509794] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 1689fc63-3c07-4517-bbef-0011d860e9fc/1689fc63-3c07-4517-bbef-0011d860e9fc.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1827.510012] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1827.510305] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-55fd6ea2-8ef2-4b28-9fa5-9afd36d36d3e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.516120] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1827.516120] env[63371]: value = "task-1774852" [ 1827.516120] env[63371]: _type = "Task" [ 1827.516120] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.525286] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774852, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.651738] env[63371]: DEBUG nova.scheduler.client.report [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1827.707296] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774851, 'name': ReconfigVM_Task, 'duration_secs': 0.327038} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.707568] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Reconfigured VM instance instance-0000006b to attach disk [datastore1] da2e3b05-9cb0-49bb-8945-924e48cf3431/da2e3b05-9cb0-49bb-8945-924e48cf3431.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1827.708209] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5062d909-5cbd-4d62-8bed-01a0033be540 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.713924] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1827.713924] env[63371]: value = "task-1774853" [ 1827.713924] env[63371]: _type = "Task" [ 1827.713924] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.717080] env[63371]: INFO nova.compute.manager [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Detaching volume 09edb13c-bd3b-408a-9cb8-685550cd6225 [ 1827.723634] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774853, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.755767] env[63371]: INFO nova.virt.block_device [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Attempting to driver detach volume 09edb13c-bd3b-408a-9cb8-685550cd6225 from mountpoint /dev/sdb [ 1827.756058] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Volume detach. 
Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1827.756218] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368481', 'volume_id': '09edb13c-bd3b-408a-9cb8-685550cd6225', 'name': 'volume-09edb13c-bd3b-408a-9cb8-685550cd6225', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'da4839fa-8597-411c-b30c-0ac9226fec1f', 'attached_at': '', 'detached_at': '', 'volume_id': '09edb13c-bd3b-408a-9cb8-685550cd6225', 'serial': '09edb13c-bd3b-408a-9cb8-685550cd6225'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1827.757094] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b41b725-4144-48b0-88df-ce656f5effcf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.778611] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d7c480-876e-4145-99b9-fd8e31225f2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.785639] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bf29f4-a53c-43a6-9a57-2d4fcc04e520 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.808156] env[63371]: DEBUG nova.compute.manager [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1827.812661] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f80d1de-7254-4e1d-83a8-a5324ceebfb1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.828325] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] The volume has not been displaced from its original location: [datastore1] volume-09edb13c-bd3b-408a-9cb8-685550cd6225/volume-09edb13c-bd3b-408a-9cb8-685550cd6225.vmdk. No consolidation needed. 
{{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1827.834145] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1827.836854] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-933b6645-c8c2-466f-ac9b-212e3eb7226d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.857159] env[63371]: DEBUG nova.virt.hardware [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1827.857426] env[63371]: DEBUG nova.virt.hardware [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1827.857539] env[63371]: DEBUG nova.virt.hardware [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1827.857711] env[63371]: DEBUG nova.virt.hardware [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1827.857845] env[63371]: DEBUG nova.virt.hardware [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1827.857981] env[63371]: DEBUG nova.virt.hardware [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1827.858211] env[63371]: DEBUG nova.virt.hardware [None 
req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1827.858364] env[63371]: DEBUG nova.virt.hardware [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1827.858520] env[63371]: DEBUG nova.virt.hardware [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1827.858802] env[63371]: DEBUG nova.virt.hardware [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1827.858899] env[63371]: DEBUG nova.virt.hardware [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1827.859196] env[63371]: DEBUG oslo_vmware.api [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1827.859196] env[63371]: value = "task-1774854" [ 1827.859196] env[63371]: _type = "Task" [ 1827.859196] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.859887] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f919225-8776-404f-8c93-12ec1471559d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.873954] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0466290-cfa5-497d-aa94-919a5934925d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.878419] env[63371]: DEBUG oslo_vmware.api [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774854, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.908110] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a2f747-4f6a-4693-84eb-6a3fb5f69c11 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.661s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.026148] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774852, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078209} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.026148] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1828.026320] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6736acad-40d1-4aa2-a051-fa5229f1c425 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.048237] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 1689fc63-3c07-4517-bbef-0011d860e9fc/1689fc63-3c07-4517-bbef-0011d860e9fc.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1828.050731] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f3825a6-bd74-43cf-8436-b61892269a13 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.071419] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1828.071419] env[63371]: value = "task-1774855" [ 1828.071419] env[63371]: _type = "Task" [ 1828.071419] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.079903] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774855, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.156541] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.388s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.157032] env[63371]: DEBUG nova.compute.manager [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1828.159797] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.502s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.161210] env[63371]: INFO nova.compute.claims [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1828.226738] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774853, 'name': Rename_Task, 'duration_secs': 0.147079} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.227038] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1828.227295] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1a1d10a8-b0b5-491f-865f-d43502df661f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.234588] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1828.234588] env[63371]: value = "task-1774856" [ 1828.234588] env[63371]: _type = "Task" [ 1828.234588] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.244020] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774856, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.278493] env[63371]: DEBUG nova.network.neutron [None req-526eaaac-70d6-4918-8db5-f01ae931e74a tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Updating instance_info_cache with network_info: [{"id": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "address": "fa:16:3e:d1:7d:4b", "network": {"id": "1f2162b2-ad92-4b6d-9e1c-02a0d65dd460", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-529696742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40f26a0147d245e59fa8a860280852e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd28ee6f-5e", "ovs_interfaceid": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1828.353071] env[63371]: DEBUG nova.compute.manager [req-4bf82956-b044-4f97-898b-13afb4ca4109 req-0e07491c-fcf1-4792-b62c-55c172e1fafe service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Received event network-vif-plugged-39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1828.353350] env[63371]: DEBUG oslo_concurrency.lockutils [req-4bf82956-b044-4f97-898b-13afb4ca4109 req-0e07491c-fcf1-4792-b62c-55c172e1fafe service nova] Acquiring lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1828.353554] env[63371]: DEBUG oslo_concurrency.lockutils [req-4bf82956-b044-4f97-898b-13afb4ca4109 req-0e07491c-fcf1-4792-b62c-55c172e1fafe service nova] Lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.353705] env[63371]: DEBUG oslo_concurrency.lockutils [req-4bf82956-b044-4f97-898b-13afb4ca4109 req-0e07491c-fcf1-4792-b62c-55c172e1fafe service nova] Lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.353874] env[63371]: DEBUG nova.compute.manager [req-4bf82956-b044-4f97-898b-13afb4ca4109 
req-0e07491c-fcf1-4792-b62c-55c172e1fafe service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] No waiting events found dispatching network-vif-plugged-39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1828.358022] env[63371]: WARNING nova.compute.manager [req-4bf82956-b044-4f97-898b-13afb4ca4109 req-0e07491c-fcf1-4792-b62c-55c172e1fafe service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Received unexpected event network-vif-plugged-39fe8c75-7aaa-42da-a231-9c68310ef7c8 for instance with vm_state building and task_state spawning. [ 1828.372463] env[63371]: DEBUG oslo_vmware.api [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774854, 'name': ReconfigVM_Task, 'duration_secs': 0.461816} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.372740] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1828.377633] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c3c0b97d-5f34-47e4-ae30-1b8d98441b39 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.392779] env[63371]: DEBUG oslo_vmware.api [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1828.392779] env[63371]: value = "task-1774857" [ 1828.392779] env[63371]: _type = "Task" [ 1828.392779] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.403300] env[63371]: DEBUG oslo_vmware.api [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774857, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.490633] env[63371]: DEBUG nova.network.neutron [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Successfully updated port: 39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1828.531578] env[63371]: DEBUG nova.objects.instance [None req-dc69079d-003c-4162-9bde-3cca8aba4def tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Lazy-loading 'flavor' on Instance uuid 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1828.581254] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774855, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.665638] env[63371]: DEBUG nova.compute.utils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1828.667049] env[63371]: DEBUG nova.compute.manager [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1828.667270] env[63371]: DEBUG nova.network.neutron [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1828.707106] env[63371]: DEBUG nova.policy [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09b4f1693ef54996899c199362970fe3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '026682964c784968a24e654531c14aa9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1828.745155] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774856, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.782191] env[63371]: DEBUG oslo_concurrency.lockutils [None req-526eaaac-70d6-4918-8db5-f01ae931e74a tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Releasing lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1828.782467] env[63371]: DEBUG nova.compute.manager [None req-526eaaac-70d6-4918-8db5-f01ae931e74a tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Inject network info {{(pid=63371) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1828.782740] env[63371]: DEBUG nova.compute.manager [None req-526eaaac-70d6-4918-8db5-f01ae931e74a tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] network_info to inject: |[{"id": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "address": "fa:16:3e:d1:7d:4b", "network": {"id": "1f2162b2-ad92-4b6d-9e1c-02a0d65dd460", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-529696742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40f26a0147d245e59fa8a860280852e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd28ee6f-5e", "ovs_interfaceid": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1828.788424] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-526eaaac-70d6-4918-8db5-f01ae931e74a tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Reconfiguring VM instance to set the machine id {{(pid=63371) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1828.788812] env[63371]: DEBUG oslo_concurrency.lockutils [req-9eddc791-94d9-4340-be21-599a459e3b77 req-4a22914b-24e5-4073-b886-da0162e769da service nova] Acquired lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.789016] env[63371]: DEBUG nova.network.neutron [req-9eddc791-94d9-4340-be21-599a459e3b77 req-4a22914b-24e5-4073-b886-da0162e769da service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Refreshing network info cache for 
port dd28ee6f-5efa-4009-842b-c1c9af10f8ea {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1828.790344] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f5d9beff-c6f5-4e6e-b7c2-acc9b17110cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.810067] env[63371]: DEBUG oslo_vmware.api [None req-526eaaac-70d6-4918-8db5-f01ae931e74a tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for the task: (returnval){ [ 1828.810067] env[63371]: value = "task-1774858" [ 1828.810067] env[63371]: _type = "Task" [ 1828.810067] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.819566] env[63371]: DEBUG oslo_vmware.api [None req-526eaaac-70d6-4918-8db5-f01ae931e74a tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774858, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.906927] env[63371]: DEBUG oslo_vmware.api [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774857, 'name': ReconfigVM_Task, 'duration_secs': 0.266945} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.907371] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368481', 'volume_id': '09edb13c-bd3b-408a-9cb8-685550cd6225', 'name': 'volume-09edb13c-bd3b-408a-9cb8-685550cd6225', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'da4839fa-8597-411c-b30c-0ac9226fec1f', 'attached_at': '', 'detached_at': '', 'volume_id': '09edb13c-bd3b-408a-9cb8-685550cd6225', 'serial': '09edb13c-bd3b-408a-9cb8-685550cd6225'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1828.994935] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.995143] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.996247] env[63371]: DEBUG nova.network.neutron [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Building network info cache for 
instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1829.001648] env[63371]: DEBUG nova.network.neutron [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Successfully created port: 9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1829.009872] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.010237] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.010454] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.010640] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.010843] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.013116] env[63371]: INFO nova.compute.manager [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Terminating instance [ 1829.014837] env[63371]: DEBUG nova.compute.manager [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1829.015058] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1829.015947] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa835da1-7d7e-404e-8023-5e36e2f4ec81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.029045] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1829.029487] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f00a254-8a9e-430c-b97d-a0995941f399 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.036873] env[63371]: DEBUG oslo_vmware.api [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1829.036873] env[63371]: value = "task-1774859" [ 1829.036873] env[63371]: _type = "Task" [ 1829.036873] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.037181] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dc69079d-003c-4162-9bde-3cca8aba4def tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquiring lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.046309] env[63371]: DEBUG oslo_vmware.api [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774859, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.081685] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774855, 'name': ReconfigVM_Task, 'duration_secs': 0.828974} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.081954] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 1689fc63-3c07-4517-bbef-0011d860e9fc/1689fc63-3c07-4517-bbef-0011d860e9fc.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1829.083296] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a4681ec-23cb-4504-93b0-4d0615ee5265 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.089632] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1829.089632] env[63371]: value = "task-1774860" [ 1829.089632] env[63371]: _type = "Task" [ 1829.089632] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.098138] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774860, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.119704] env[63371]: DEBUG nova.network.neutron [req-9eddc791-94d9-4340-be21-599a459e3b77 req-4a22914b-24e5-4073-b886-da0162e769da service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Updated VIF entry in instance network info cache for port dd28ee6f-5efa-4009-842b-c1c9af10f8ea. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1829.120179] env[63371]: DEBUG nova.network.neutron [req-9eddc791-94d9-4340-be21-599a459e3b77 req-4a22914b-24e5-4073-b886-da0162e769da service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Updating instance_info_cache with network_info: [{"id": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "address": "fa:16:3e:d1:7d:4b", "network": {"id": "1f2162b2-ad92-4b6d-9e1c-02a0d65dd460", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-529696742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40f26a0147d245e59fa8a860280852e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd28ee6f-5e", "ovs_interfaceid": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.170576] env[63371]: DEBUG nova.compute.manager [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1829.253909] env[63371]: DEBUG oslo_vmware.api [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774856, 'name': PowerOnVM_Task, 'duration_secs': 0.663235} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.255734] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1829.257079] env[63371]: INFO nova.compute.manager [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Took 8.04 seconds to spawn the instance on the hypervisor. 
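The ReconfigVM_Task / PowerOffVM_Task / PowerOnVM_Task entries above all follow the same poll-until-done pattern: the vCenter API returns a task handle, and the client polls its progress ("progress is 6%", "completed successfully") until it finishes or fails. The following is a minimal, self-contained Python sketch of that pattern only; it does not use the real oslo.vmware API, and names such as fetch_task_state are hypothetical stand-ins.

    import time

    # Hypothetical task states, loosely mirroring what a task-based API reports.
    RUNNING, SUCCESS, ERROR = "running", "success", "error"


    def poll_task(fetch_task_state, interval=0.5, timeout=60.0):
        """Poll fetch_task_state() until it reports success or error.

        fetch_task_state is a caller-supplied callable returning a
        (state, progress_percent) tuple; it stands in for asking vCenter
        about a task handle and is not the real oslo.vmware call.
        """
        deadline = time.monotonic() + timeout
        while True:
            state, progress = fetch_task_state()
            if state == SUCCESS:
                return True
            if state == ERROR:
                raise RuntimeError("task failed at %s%%" % progress)
            if time.monotonic() > deadline:
                raise TimeoutError("task did not complete within %ss" % timeout)
            time.sleep(interval)


    if __name__ == "__main__":
        # Simulated task that completes after three polls.
        states = iter([(RUNNING, 0), (RUNNING, 89), (SUCCESS, 100)])
        print(poll_task(lambda: next(states)))
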
[ 1829.257284] env[63371]: DEBUG nova.compute.manager [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1829.260432] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c122cb-a624-4fe0-91a8-2d8b09e22be7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.321053] env[63371]: DEBUG oslo_vmware.api [None req-526eaaac-70d6-4918-8db5-f01ae931e74a tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774858, 'name': ReconfigVM_Task, 'duration_secs': 0.180281} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.321476] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-526eaaac-70d6-4918-8db5-f01ae931e74a tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Reconfigured VM instance to set the machine id {{(pid=63371) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1829.436019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee493e7-fb76-4bfb-8138-8c1275940f83 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.444657] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ae5d026-70b7-4d4f-b836-251bf4de8c10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.478023] env[63371]: DEBUG nova.objects.instance [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lazy-loading 'flavor' on Instance uuid da4839fa-8597-411c-b30c-0ac9226fec1f {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1829.479297] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292a16a8-8316-4267-8ef5-279b898ead74 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.486954] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94099276-44b3-4426-a354-13f4a9aeed91 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.502304] env[63371]: DEBUG nova.compute.provider_tree [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1829.537100] env[63371]: DEBUG nova.network.neutron [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1829.547519] env[63371]: DEBUG oslo_vmware.api [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774859, 'name': PowerOffVM_Task, 'duration_secs': 0.18342} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.547771] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1829.547928] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1829.552017] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-486c4bec-aafd-4e4c-babe-915c1e2b319e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.600048] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774860, 'name': Rename_Task, 'duration_secs': 0.168552} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.601926] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1829.602216] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-47ebf963-ed2b-4e79-8913-721b1707e1f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.610032] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1829.610032] env[63371]: value = "task-1774862" [ 1829.610032] env[63371]: _type = "Task" [ 1829.610032] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.616988] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774862, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.623022] env[63371]: DEBUG oslo_concurrency.lockutils [req-9eddc791-94d9-4340-be21-599a459e3b77 req-4a22914b-24e5-4073-b886-da0162e769da service nova] Releasing lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1829.623022] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dc69079d-003c-4162-9bde-3cca8aba4def tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquired lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.707500] env[63371]: DEBUG nova.network.neutron [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updating instance_info_cache with network_info: [{"id": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "address": "fa:16:3e:7a:b8:42", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39fe8c75-7a", "ovs_interfaceid": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1829.776733] env[63371]: INFO nova.compute.manager [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Took 34.35 seconds to build instance. 
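The lockutils records in this trace always account for how long a caller waited for a named lock and how long it was held ("waited 0.000s", "held 3.183s"). As a rough illustration of that bookkeeping only, here is a small sketch built on a plain threading.Lock; it mimics the waited/held reporting and is not the oslo.concurrency implementation.

    import contextlib
    import threading
    import time

    _locks = {}
    _registry_guard = threading.Lock()


    @contextlib.contextmanager
    def timed_lock(name):
        """Acquire a named lock and report waited/held durations, log-style."""
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        start = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - start
        print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
        held_from = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - held_from
            print('Lock "%s" released :: held %.3fs' % (name, held))


    if __name__ == "__main__":
        with timed_lock("refresh_cache-example"):
            time.sleep(0.1)
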
[ 1829.823234] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1829.823481] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1829.823654] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleting the datastore file [datastore1] 407d1ef8-c5df-4277-b503-0d09cdaf8ef1 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1829.823929] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9f1bc73-9e24-40a6-86e1-96feaa1bebc5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.833180] env[63371]: DEBUG oslo_vmware.api [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1829.833180] env[63371]: value = "task-1774863" [ 1829.833180] env[63371]: _type = "Task" [ 1829.833180] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.842792] env[63371]: DEBUG oslo_vmware.api [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774863, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.008461] env[63371]: DEBUG nova.scheduler.client.report [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1830.026011] env[63371]: DEBUG nova.network.neutron [None req-dc69079d-003c-4162-9bde-3cca8aba4def tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1830.120106] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774862, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.184020] env[63371]: DEBUG nova.compute.manager [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1830.208828] env[63371]: DEBUG nova.virt.hardware [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1830.209264] env[63371]: DEBUG nova.virt.hardware [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1830.209349] env[63371]: DEBUG nova.virt.hardware [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1830.209518] env[63371]: DEBUG nova.virt.hardware [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1830.209683] env[63371]: DEBUG nova.virt.hardware [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1830.209878] env[63371]: DEBUG nova.virt.hardware [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1830.210041] env[63371]: DEBUG nova.virt.hardware [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1830.210203] env[63371]: DEBUG nova.virt.hardware [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1830.210406] env[63371]: DEBUG nova.virt.hardware [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1830.210581] env[63371]: DEBUG nova.virt.hardware [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1830.210755] env[63371]: DEBUG nova.virt.hardware [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1830.211275] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.211552] env[63371]: DEBUG nova.compute.manager [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Instance network_info: |[{"id": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "address": "fa:16:3e:7a:b8:42", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39fe8c75-7a", "ovs_interfaceid": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1830.212373] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e76d9764-6d7f-4bb6-b50e-72c7c1541fde {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.215227] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 
tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:b8:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ba3bd22-c936-470e-89bd-b3a5587e87a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '39fe8c75-7aaa-42da-a231-9c68310ef7c8', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1830.223158] env[63371]: DEBUG oslo.service.loopingcall [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1830.223371] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1830.224046] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34c131ad-378b-428c-af80-b975ef0897aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.243480] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0e1a78-f7e1-47a3-aab3-54e2c2c80a2b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.248091] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1830.248091] env[63371]: value = "task-1774864" [ 1830.248091] env[63371]: _type = "Task" [ 1830.248091] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.262874] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774864, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.278458] env[63371]: DEBUG oslo_concurrency.lockutils [None req-a2a221b0-3b30-42cf-9a83-f5940768547f tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.857s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.343581] env[63371]: DEBUG oslo_vmware.api [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774863, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.169372} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.343862] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1830.344085] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1830.344306] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1830.344487] env[63371]: INFO nova.compute.manager [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Took 1.33 seconds to destroy the instance on the hypervisor. [ 1830.344729] env[63371]: DEBUG oslo.service.loopingcall [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1830.344943] env[63371]: DEBUG nova.compute.manager [-] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1830.345031] env[63371]: DEBUG nova.network.neutron [-] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1830.388788] env[63371]: DEBUG nova.compute.manager [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Received event network-changed-39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1830.388788] env[63371]: DEBUG nova.compute.manager [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Refreshing instance network info cache due to event network-changed-39fe8c75-7aaa-42da-a231-9c68310ef7c8. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1830.389034] env[63371]: DEBUG oslo_concurrency.lockutils [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] Acquiring lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1830.389184] env[63371]: DEBUG oslo_concurrency.lockutils [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] Acquired lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1830.390176] env[63371]: DEBUG nova.network.neutron [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Refreshing network info cache for port 39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1830.489618] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eb433f19-3b5d-4748-b78c-dc64de8c577d tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.276s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.513708] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.353s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.513708] env[63371]: DEBUG nova.compute.manager [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1830.515935] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.788s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.516178] env[63371]: DEBUG nova.objects.instance [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lazy-loading 'resources' on Instance uuid d042bb16-c84d-42bb-af3f-38c08995fd91 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1830.623674] env[63371]: DEBUG oslo_vmware.api [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774862, 'name': PowerOnVM_Task, 'duration_secs': 0.797295} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.623933] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1830.624172] env[63371]: INFO nova.compute.manager [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Took 7.06 seconds to spawn the instance on the hypervisor. [ 1830.624360] env[63371]: DEBUG nova.compute.manager [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1830.625332] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940b9acf-642e-46c4-9080-12967e845bed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.758351] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774864, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.961298] env[63371]: DEBUG nova.network.neutron [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Successfully updated port: 9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1830.976822] env[63371]: DEBUG nova.network.neutron [None req-dc69079d-003c-4162-9bde-3cca8aba4def tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Updating instance_info_cache with network_info: [{"id": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "address": "fa:16:3e:d1:7d:4b", "network": {"id": "1f2162b2-ad92-4b6d-9e1c-02a0d65dd460", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-529696742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40f26a0147d245e59fa8a860280852e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd28ee6f-5e", "ovs_interfaceid": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1831.021576] env[63371]: DEBUG nova.compute.utils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1831.030515] env[63371]: DEBUG nova.compute.manager [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1831.030718] env[63371]: DEBUG nova.network.neutron [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1831.095908] env[63371]: DEBUG nova.network.neutron [-] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1831.107686] env[63371]: DEBUG nova.policy [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '739c32320b904c76ace3302c40b1d627', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5942787fa9584e8fbf5ddd459907ce5d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1831.148499] env[63371]: INFO nova.compute.manager [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Took 31.38 seconds to build instance. [ 1831.188791] env[63371]: DEBUG nova.network.neutron [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updated VIF entry in instance network info cache for port 39fe8c75-7aaa-42da-a231-9c68310ef7c8. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1831.188791] env[63371]: DEBUG nova.network.neutron [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updating instance_info_cache with network_info: [{"id": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "address": "fa:16:3e:7a:b8:42", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39fe8c75-7a", "ovs_interfaceid": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1831.260762] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774864, 'name': CreateVM_Task, 'duration_secs': 0.599361} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.260762] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1831.261623] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1831.261995] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.262343] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1831.262651] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52ae484c-79cc-499e-8065-678ab816688b {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.267598] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1831.267598] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521ca7e5-a103-85dc-d709-5c2869609b02" [ 1831.267598] env[63371]: _type = "Task" [ 1831.267598] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.277106] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521ca7e5-a103-85dc-d709-5c2869609b02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.287119] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b264a31-4136-444c-8e49-510a9d61ab27 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.294272] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243f3e5b-914d-4565-aab1-77c5d16c3f89 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.325368] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec87a8ae-ea46-4910-900a-7dfe13182398 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.333443] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1ecbd6-9271-4069-81e6-053ee0634400 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.348049] env[63371]: DEBUG nova.compute.provider_tree [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1831.465792] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "refresh_cache-a27ab7ba-481c-4292-a885-5dc8d8653d0b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1831.465956] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquired lock "refresh_cache-a27ab7ba-481c-4292-a885-5dc8d8653d0b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.466126] env[63371]: DEBUG nova.network.neutron [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 
a27ab7ba-481c-4292-a885-5dc8d8653d0b] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1831.479469] env[63371]: DEBUG oslo_concurrency.lockutils [None req-dc69079d-003c-4162-9bde-3cca8aba4def tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Releasing lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.479794] env[63371]: DEBUG nova.compute.manager [None req-dc69079d-003c-4162-9bde-3cca8aba4def tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Inject network info {{(pid=63371) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1831.480102] env[63371]: DEBUG nova.compute.manager [None req-dc69079d-003c-4162-9bde-3cca8aba4def tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] network_info to inject: |[{"id": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "address": "fa:16:3e:d1:7d:4b", "network": {"id": "1f2162b2-ad92-4b6d-9e1c-02a0d65dd460", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-529696742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40f26a0147d245e59fa8a860280852e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd28ee6f-5e", "ovs_interfaceid": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7221}} [ 1831.487702] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dc69079d-003c-4162-9bde-3cca8aba4def tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Reconfiguring VM instance to set the machine id {{(pid=63371) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1831.488336] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-578bda00-8fe6-4a20-998f-ef95245ceca1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.505234] env[63371]: DEBUG oslo_vmware.api [None req-dc69079d-003c-4162-9bde-3cca8aba4def tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for the task: (returnval){ [ 1831.505234] env[63371]: value = "task-1774865" [ 1831.505234] env[63371]: _type = 
"Task" [ 1831.505234] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.516012] env[63371]: DEBUG oslo_vmware.api [None req-dc69079d-003c-4162-9bde-3cca8aba4def tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774865, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.531181] env[63371]: DEBUG nova.compute.manager [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1831.544931] env[63371]: DEBUG nova.compute.manager [req-1ed5917b-8545-4b89-b76b-6fab1e062134 req-93300c4c-1afe-48c8-aab9-775aa0f5787a service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Received event network-changed-a22c781d-8374-4914-8e01-d61b8df475a7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1831.545146] env[63371]: DEBUG nova.compute.manager [req-1ed5917b-8545-4b89-b76b-6fab1e062134 req-93300c4c-1afe-48c8-aab9-775aa0f5787a service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Refreshing instance network info cache due to event network-changed-a22c781d-8374-4914-8e01-d61b8df475a7. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1831.545369] env[63371]: DEBUG oslo_concurrency.lockutils [req-1ed5917b-8545-4b89-b76b-6fab1e062134 req-93300c4c-1afe-48c8-aab9-775aa0f5787a service nova] Acquiring lock "refresh_cache-da2e3b05-9cb0-49bb-8945-924e48cf3431" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1831.545537] env[63371]: DEBUG oslo_concurrency.lockutils [req-1ed5917b-8545-4b89-b76b-6fab1e062134 req-93300c4c-1afe-48c8-aab9-775aa0f5787a service nova] Acquired lock "refresh_cache-da2e3b05-9cb0-49bb-8945-924e48cf3431" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.545705] env[63371]: DEBUG nova.network.neutron [req-1ed5917b-8545-4b89-b76b-6fab1e062134 req-93300c4c-1afe-48c8-aab9-775aa0f5787a service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Refreshing network info cache for port a22c781d-8374-4914-8e01-d61b8df475a7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1831.602665] env[63371]: INFO nova.compute.manager [-] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Took 1.26 seconds to deallocate network for instance. 
[ 1831.622885] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "da4839fa-8597-411c-b30c-0ac9226fec1f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.622885] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.622885] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "da4839fa-8597-411c-b30c-0ac9226fec1f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.622885] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.622885] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.625438] env[63371]: INFO nova.compute.manager [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Terminating instance [ 1831.627416] env[63371]: DEBUG nova.compute.manager [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1831.627762] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1831.628705] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf71015-5af7-4dc6-a943-ccc982a85be9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.637822] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1831.638303] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b3c5b41-5fc3-4fa4-9fdc-ee992a445e9c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.642157] env[63371]: DEBUG nova.network.neutron [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Successfully created port: baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1831.650285] env[63371]: DEBUG oslo_concurrency.lockutils [None req-43637b38-f46c-4e46-8639-783d810a6ef4 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.890s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.650729] env[63371]: DEBUG oslo_vmware.api [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1831.650729] env[63371]: value = "task-1774866" [ 1831.650729] env[63371]: _type = "Task" [ 1831.650729] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.659708] env[63371]: DEBUG oslo_vmware.api [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774866, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.692971] env[63371]: DEBUG oslo_concurrency.lockutils [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] Releasing lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.692971] env[63371]: DEBUG nova.compute.manager [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Received event network-changed-dd28ee6f-5efa-4009-842b-c1c9af10f8ea {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1831.692971] env[63371]: DEBUG nova.compute.manager [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Refreshing instance network info cache due to event network-changed-dd28ee6f-5efa-4009-842b-c1c9af10f8ea. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1831.692971] env[63371]: DEBUG oslo_concurrency.lockutils [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] Acquiring lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1831.692971] env[63371]: DEBUG oslo_concurrency.lockutils [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] Acquired lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.692971] env[63371]: DEBUG nova.network.neutron [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Refreshing network info cache for port dd28ee6f-5efa-4009-842b-c1c9af10f8ea {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1831.780523] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521ca7e5-a103-85dc-d709-5c2869609b02, 'name': SearchDatastore_Task, 'duration_secs': 0.010004} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.781409] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.781811] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1831.782338] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1831.782643] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.782937] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1831.783408] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-787d56ec-5ac2-494f-bf5a-cf3b7b191972 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.792480] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1831.792911] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1831.795093] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3608ef79-adc2-42fb-a95c-05c8c827387a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.804228] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1831.804228] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c5452c-b6f0-f085-7b99-7afca15b0067" [ 1831.804228] env[63371]: _type = "Task" [ 1831.804228] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.817792] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c5452c-b6f0-f085-7b99-7afca15b0067, 'name': SearchDatastore_Task, 'duration_secs': 0.008338} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.819774] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21ba6460-9d81-4502-9872-7fcf5b7ae24f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.829163] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1831.829163] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524b767a-ca56-a33a-50d6-81b85260d1af" [ 1831.829163] env[63371]: _type = "Task" [ 1831.829163] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.838188] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524b767a-ca56-a33a-50d6-81b85260d1af, 'name': SearchDatastore_Task, 'duration_secs': 0.008471} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.838626] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.838985] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce/3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1831.839461] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a821f45-c6f1-4dd4-ae56-76acbf1170ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.849312] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1831.849312] env[63371]: value = "task-1774867" [ 1831.849312] env[63371]: _type = "Task" [ 1831.849312] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.852588] env[63371]: DEBUG nova.scheduler.client.report [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1831.862772] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774867, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.015058] env[63371]: DEBUG nova.network.neutron [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1832.023008] env[63371]: DEBUG oslo_vmware.api [None req-dc69079d-003c-4162-9bde-3cca8aba4def tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774865, 'name': ReconfigVM_Task, 'duration_secs': 0.17523} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.027475] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-dc69079d-003c-4162-9bde-3cca8aba4def tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Reconfigured VM instance to set the machine id {{(pid=63371) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1832.112050] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.161833] env[63371]: DEBUG oslo_vmware.api [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774866, 'name': PowerOffVM_Task, 'duration_secs': 0.286128} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.162140] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1832.162311] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1832.162575] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d110f61b-87b2-45fd-b1c3-e8d2d9e1427e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.238829] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1832.239537] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1832.239729] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Deleting the datastore file [datastore1] da4839fa-8597-411c-b30c-0ac9226fec1f {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1832.239941] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4e7bc4d-275b-470b-8df0-392921d0e95e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.247467] env[63371]: DEBUG oslo_vmware.api [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1832.247467] env[63371]: value = "task-1774869" [ 1832.247467] env[63371]: _type = "Task" [ 1832.247467] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.256866] env[63371]: DEBUG oslo_vmware.api [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774869, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.301251] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquiring lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.301535] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.301847] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquiring lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.301941] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.302870] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Lock 
"3b2ed231-9f9c-4d28-9c81-034c2d17c9a7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.305347] env[63371]: INFO nova.compute.manager [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Terminating instance [ 1832.308954] env[63371]: DEBUG nova.compute.manager [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1832.309281] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1832.310117] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e90b28-0461-49b2-9be7-88dfd920c8e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.317455] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1832.317696] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-699b3edd-ca31-46a7-a485-a60cf5a45d68 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.324329] env[63371]: DEBUG oslo_vmware.api [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for the task: (returnval){ [ 1832.324329] env[63371]: value = "task-1774870" [ 1832.324329] env[63371]: _type = "Task" [ 1832.324329] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.345142] env[63371]: DEBUG oslo_vmware.api [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774870, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.358894] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.843s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.360914] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774867, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489995} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.361656] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.709s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.363457] env[63371]: INFO nova.compute.claims [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1832.366207] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce/3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1832.366207] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1832.369419] env[63371]: DEBUG nova.network.neutron [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Updating instance_info_cache with network_info: [{"id": "9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2", "address": "fa:16:3e:1c:a6:2b", "network": {"id": "37c74e18-5c2a-4df3-b429-8a4fb9f29cc0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1560662466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "026682964c784968a24e654531c14aa9", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9df7ecd9-de", "ovs_interfaceid": "9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.370030] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cb1b964d-f314-4f65-9b2d-008d67b7b2c8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.377384] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1832.377384] env[63371]: value = "task-1774871" [ 1832.377384] env[63371]: _type = "Task" [ 1832.377384] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.389630] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774871, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.391248] env[63371]: INFO nova.scheduler.client.report [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Deleted allocations for instance d042bb16-c84d-42bb-af3f-38c08995fd91 [ 1832.434470] env[63371]: DEBUG nova.compute.manager [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Received event network-vif-deleted-e09a5b9d-78bf-4d1c-98f8-434a0c37c88d {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1832.434711] env[63371]: DEBUG nova.compute.manager [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Received event network-vif-plugged-9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1832.434907] env[63371]: DEBUG oslo_concurrency.lockutils [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] Acquiring lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.435135] env[63371]: DEBUG oslo_concurrency.lockutils [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] Lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.435311] env[63371]: DEBUG oslo_concurrency.lockutils [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] Lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.435480] env[63371]: DEBUG nova.compute.manager [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] No waiting events found dispatching network-vif-plugged-9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1832.435647] env[63371]: WARNING nova.compute.manager [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Received unexpected event network-vif-plugged-9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2 for instance with vm_state building and task_state spawning. [ 1832.435809] env[63371]: DEBUG nova.compute.manager [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Received event network-changed-9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1832.435963] env[63371]: DEBUG nova.compute.manager [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Refreshing instance network info cache due to event network-changed-9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1832.436152] env[63371]: DEBUG oslo_concurrency.lockutils [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] Acquiring lock "refresh_cache-a27ab7ba-481c-4292-a885-5dc8d8653d0b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1832.518129] env[63371]: DEBUG nova.network.neutron [req-1ed5917b-8545-4b89-b76b-6fab1e062134 req-93300c4c-1afe-48c8-aab9-775aa0f5787a service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Updated VIF entry in instance network info cache for port a22c781d-8374-4914-8e01-d61b8df475a7. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1832.518743] env[63371]: DEBUG nova.network.neutron [req-1ed5917b-8545-4b89-b76b-6fab1e062134 req-93300c4c-1afe-48c8-aab9-775aa0f5787a service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Updating instance_info_cache with network_info: [{"id": "a22c781d-8374-4914-8e01-d61b8df475a7", "address": "fa:16:3e:d9:60:17", "network": {"id": "462b4670-dcf4-402c-93ad-9cbeb6f9ff2b", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1819409172-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.173", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f3df339d9a704d9b9bebecac3871584c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa22c781d-83", "ovs_interfaceid": "a22c781d-8374-4914-8e01-d61b8df475a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.525587] env[63371]: DEBUG nova.network.neutron [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Updated VIF entry in instance network info cache for port dd28ee6f-5efa-4009-842b-c1c9af10f8ea. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1832.525587] env[63371]: DEBUG nova.network.neutron [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Updating instance_info_cache with network_info: [{"id": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "address": "fa:16:3e:d1:7d:4b", "network": {"id": "1f2162b2-ad92-4b6d-9e1c-02a0d65dd460", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-529696742-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "40f26a0147d245e59fa8a860280852e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04e15990-16e1-4cb2-b0f0-06c362e68c5e", "external-id": "nsx-vlan-transportzone-555", "segmentation_id": 555, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd28ee6f-5e", "ovs_interfaceid": "dd28ee6f-5efa-4009-842b-c1c9af10f8ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.545870] env[63371]: DEBUG nova.compute.manager [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1832.578228] env[63371]: DEBUG nova.virt.hardware [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1832.578488] env[63371]: DEBUG nova.virt.hardware [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1832.578643] env[63371]: DEBUG nova.virt.hardware [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1832.578821] env[63371]: DEBUG nova.virt.hardware [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1832.578963] env[63371]: DEBUG nova.virt.hardware [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1832.579122] env[63371]: DEBUG nova.virt.hardware [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1832.579329] env[63371]: DEBUG nova.virt.hardware [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1832.579487] env[63371]: DEBUG nova.virt.hardware [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1832.579636] env[63371]: DEBUG nova.virt.hardware [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1832.579795] env[63371]: DEBUG nova.virt.hardware [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1832.580455] env[63371]: DEBUG nova.virt.hardware [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1832.580851] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00abd5a6-d493-4ced-8946-23d882ca6367 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.589406] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efac6ce9-9b1d-4ce8-8090-2d29c30c416c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.758837] env[63371]: DEBUG oslo_vmware.api [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271645} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.761033] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1832.761033] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1832.761033] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1832.761033] env[63371]: INFO nova.compute.manager [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Took 1.13 seconds to destroy the instance on the hypervisor. 
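The terminate path above is bracketed by per-instance named locks (the "Acquiring lock ... by ...do_terminate_instance", "acquired ... waited 0.000s" and '"released" ... held N.NNNs' triplets). A minimal sketch of that oslo.concurrency pattern, reusing the instance UUID from the entries above purely for illustration:

from oslo_concurrency import lockutils

instance_uuid = 'da4839fa-8597-411c-b30c-0ac9226fec1f'  # UUID taken from the log above

@lockutils.synchronized(instance_uuid, 'nova-')
def do_terminate_instance():
    # Runs with the per-instance lock held, so concurrent terminate/build
    # requests for the same instance are serialized; acquiring and releasing
    # this lock is what emits the lockutils DEBUG lines in this log.
    pass

# The same named lock is also available as a context manager:
with lockutils.lock(instance_uuid, 'nova-'):
    pass  # critical section
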
[ 1832.761033] env[63371]: DEBUG oslo.service.loopingcall [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1832.761033] env[63371]: DEBUG nova.compute.manager [-] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1832.761033] env[63371]: DEBUG nova.network.neutron [-] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1832.835419] env[63371]: DEBUG oslo_vmware.api [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774870, 'name': PowerOffVM_Task, 'duration_secs': 0.23666} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.835841] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1832.836019] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1832.836333] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c52aaf8b-35e4-466e-9c08-03b73ecb88c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.875368] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Releasing lock "refresh_cache-a27ab7ba-481c-4292-a885-5dc8d8653d0b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1832.875654] env[63371]: DEBUG nova.compute.manager [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Instance network_info: |[{"id": "9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2", "address": "fa:16:3e:1c:a6:2b", "network": {"id": "37c74e18-5c2a-4df3-b429-8a4fb9f29cc0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1560662466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "026682964c784968a24e654531c14aa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9df7ecd9-de", "ovs_interfaceid": "9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1832.876158] env[63371]: DEBUG oslo_concurrency.lockutils [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] Acquired lock "refresh_cache-a27ab7ba-481c-4292-a885-5dc8d8653d0b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1832.877785] env[63371]: DEBUG nova.network.neutron [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Refreshing network info cache for port 9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1832.877785] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1c:a6:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1e7a4976-597e-4636-990e-6062b5faadee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1832.885306] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Creating folder: Project (026682964c784968a24e654531c14aa9). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1832.888648] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-afdea227-b65e-459f-b7d7-9e7514ac5687 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.899190] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774871, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071643} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.899460] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1832.900949] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b31ba3c-7d33-483c-82f1-e1a0fa2a3e88 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.906898] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Created folder: Project (026682964c784968a24e654531c14aa9) in parent group-v368199. [ 1832.906898] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Creating folder: Instances. Parent ref: group-v368486. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1832.907835] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a92c0010-6a72-446b-be60-3954678cf33f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.909254] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37ea1da0-ab83-4df6-9db0-72eb3d7deb9b tempest-ServerDiskConfigTestJSON-625964520 tempest-ServerDiskConfigTestJSON-625964520-project-member] Lock "d042bb16-c84d-42bb-af3f-38c08995fd91" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.735s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1832.930771] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce/3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1832.933473] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f7f603a-cc90-4917-97b6-839dc2e3b241 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.955422] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Created folder: Instances in parent group-v368486. [ 1832.955422] env[63371]: DEBUG oslo.service.loopingcall [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1832.957415] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1832.957415] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e2ef95d0-02b3-4c2b-a1ee-7d26c1da54e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.973643] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1832.973643] env[63371]: value = "task-1774875" [ 1832.973643] env[63371]: _type = "Task" [ 1832.973643] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.979192] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1832.979192] env[63371]: value = "task-1774876" [ 1832.979192] env[63371]: _type = "Task" [ 1832.979192] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.987548] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774875, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.992699] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774876, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.025129] env[63371]: DEBUG oslo_concurrency.lockutils [req-1ed5917b-8545-4b89-b76b-6fab1e062134 req-93300c4c-1afe-48c8-aab9-775aa0f5787a service nova] Releasing lock "refresh_cache-da2e3b05-9cb0-49bb-8945-924e48cf3431" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.026919] env[63371]: DEBUG oslo_concurrency.lockutils [req-939403ac-0b9c-4631-ab85-54d83404afa2 req-6cd1dbc5-3238-4541-a6de-74fb58608cc3 service nova] Releasing lock "refresh_cache-3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.070787] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1833.071120] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1833.071249] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Deleting the datastore file [datastore1] 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1833.071528] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8fe5e3c2-322e-449c-81ac-86bca7f860e1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.086489] env[63371]: DEBUG oslo_vmware.api [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for the task: (returnval){ [ 1833.086489] env[63371]: value = "task-1774877" [ 1833.086489] env[63371]: _type = "Task" [ 1833.086489] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.097182] env[63371]: DEBUG oslo_vmware.api [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774877, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.306718] env[63371]: DEBUG nova.network.neutron [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Updated VIF entry in instance network info cache for port 9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1833.307094] env[63371]: DEBUG nova.network.neutron [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Updating instance_info_cache with network_info: [{"id": "9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2", "address": "fa:16:3e:1c:a6:2b", "network": {"id": "37c74e18-5c2a-4df3-b429-8a4fb9f29cc0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1560662466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "026682964c784968a24e654531c14aa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9df7ecd9-de", "ovs_interfaceid": "9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.459859] env[63371]: DEBUG nova.compute.manager [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Stashing vm_state: active {{(pid=63371) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1833.483984] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774875, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.496978] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774876, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.512578] env[63371]: DEBUG nova.network.neutron [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Successfully updated port: baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1833.578626] env[63371]: DEBUG nova.compute.manager [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Received event network-vif-plugged-baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1833.578722] env[63371]: DEBUG oslo_concurrency.lockutils [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] Acquiring lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.579753] env[63371]: DEBUG oslo_concurrency.lockutils [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.579753] env[63371]: DEBUG oslo_concurrency.lockutils [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.579753] env[63371]: DEBUG nova.compute.manager [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] No waiting events found dispatching network-vif-plugged-baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1833.579753] env[63371]: WARNING nova.compute.manager [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Received unexpected event network-vif-plugged-baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 for instance with vm_state building and task_state spawning. 
[ 1833.579976] env[63371]: DEBUG nova.compute.manager [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Received event network-vif-deleted-febc3a69-64cf-48c1-8399-147f35d89c61 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1833.579976] env[63371]: INFO nova.compute.manager [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Neutron deleted interface febc3a69-64cf-48c1-8399-147f35d89c61; detaching it from the instance and deleting it from the info cache [ 1833.580108] env[63371]: DEBUG nova.network.neutron [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.601848] env[63371]: DEBUG oslo_vmware.api [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Task: {'id': task-1774877, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146136} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.608024] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1833.608024] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1833.608024] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1833.608024] env[63371]: INFO nova.compute.manager [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1833.608024] env[63371]: DEBUG oslo.service.loopingcall [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1833.608024] env[63371]: DEBUG nova.compute.manager [-] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1833.608024] env[63371]: DEBUG nova.network.neutron [-] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1833.689240] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0edffe7-ca74-4b75-9404-c186f4c1db12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.697893] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620c9f94-e87e-472b-9732-12052468daa7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.734477] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35cf44b2-105e-446f-a42c-3fa828e92d2b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.742706] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1764264f-8b44-40fb-b12b-89d6b56d780b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.756991] env[63371]: DEBUG nova.compute.provider_tree [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1833.811507] env[63371]: DEBUG oslo_concurrency.lockutils [req-e790a005-789d-4b0b-a604-bed8fd936966 req-d283a0d2-7df2-4f90-9a58-8a7d24d75c5e service nova] Releasing lock "refresh_cache-a27ab7ba-481c-4292-a885-5dc8d8653d0b" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1833.989820] env[63371]: DEBUG nova.network.neutron [-] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.991433] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774875, 'name': ReconfigVM_Task, 'duration_secs': 0.604014} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.993385] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.998364] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce/3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1833.999559] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62693cde-a688-48ee-9c52-208a0b32ba52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.008560] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774876, 'name': CreateVM_Task, 'duration_secs': 0.601468} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.010735] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1834.011162] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1834.011162] env[63371]: value = "task-1774878" [ 1834.011162] env[63371]: _type = "Task" [ 1834.011162] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.011924] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.012153] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.014233] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1834.014233] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-586400e4-2a0d-4c6e-8b44-9b4aea1fbefe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.018390] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.018516] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.018651] env[63371]: DEBUG nova.network.neutron [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1834.025863] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1834.025863] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52be8d9f-3882-8c69-4653-4cf4ca38064d" [ 1834.025863] env[63371]: _type = "Task" [ 1834.025863] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.029034] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774878, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.036058] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52be8d9f-3882-8c69-4653-4cf4ca38064d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.087772] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b00f88a2-734d-4d49-b07e-3edd6683d98f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.098084] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09230a93-334f-45c3-aab8-c23ca36638a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.132046] env[63371]: DEBUG nova.compute.manager [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Detach interface failed, port_id=febc3a69-64cf-48c1-8399-147f35d89c61, reason: Instance da4839fa-8597-411c-b30c-0ac9226fec1f could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1834.132331] env[63371]: DEBUG nova.compute.manager [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Received event network-changed-baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1834.132504] env[63371]: DEBUG nova.compute.manager [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Refreshing instance network info cache due to event network-changed-baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1834.133656] env[63371]: DEBUG oslo_concurrency.lockutils [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] Acquiring lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.264020] env[63371]: DEBUG nova.scheduler.client.report [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1834.492451] env[63371]: INFO nova.compute.manager [-] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Took 1.73 seconds to deallocate network for instance. [ 1834.526548] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774878, 'name': Rename_Task, 'duration_secs': 0.484433} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.526548] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1834.526548] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-20f2a551-a01f-4746-a866-130f2a7a387a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.542363] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52be8d9f-3882-8c69-4653-4cf4ca38064d, 'name': SearchDatastore_Task, 'duration_secs': 0.023013} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.544511] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1834.544755] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1834.544980] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.545559] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.545559] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1834.545862] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1834.545862] env[63371]: value = "task-1774879" [ 1834.545862] env[63371]: _type = "Task" [ 1834.545862] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.546566] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-121b7d0c-631e-4059-a539-6aafc8af39a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.557823] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774879, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.559367] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1834.559564] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1834.560676] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d51897cb-d3aa-4bed-9014-6f6afe4c6261 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.566073] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1834.566073] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520d0ef6-3d3e-f0df-a31a-ba71817f51f3" [ 1834.566073] env[63371]: _type = "Task" [ 1834.566073] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.577856] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520d0ef6-3d3e-f0df-a31a-ba71817f51f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.583209] env[63371]: DEBUG nova.network.neutron [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1834.739944] env[63371]: DEBUG nova.network.neutron [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updating instance_info_cache with network_info: [{"id": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "address": "fa:16:3e:38:f8:9a", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaee0e3e-86", "ovs_interfaceid": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.768665] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.769297] env[63371]: DEBUG nova.compute.manager [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1834.772106] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.660s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1834.772328] env[63371]: DEBUG nova.objects.instance [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lazy-loading 'resources' on Instance uuid 407d1ef8-c5df-4277-b503-0d09cdaf8ef1 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1834.853316] env[63371]: DEBUG nova.network.neutron [-] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1834.908081] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1834.908081] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1835.000922] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1835.061607] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774879, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.076246] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520d0ef6-3d3e-f0df-a31a-ba71817f51f3, 'name': SearchDatastore_Task, 'duration_secs': 0.009307} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.077089] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b987dda5-5c83-4315-9f3f-32afabf0ef80 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.084057] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1835.084057] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a7c971-2472-30f6-0882-a2e6ebac2604" [ 1835.084057] env[63371]: _type = "Task" [ 1835.084057] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.093184] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a7c971-2472-30f6-0882-a2e6ebac2604, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.245491] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.245491] env[63371]: DEBUG nova.compute.manager [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Instance network_info: |[{"id": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "address": "fa:16:3e:38:f8:9a", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaee0e3e-86", "ovs_interfaceid": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1835.245685] env[63371]: DEBUG oslo_concurrency.lockutils [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] Acquired lock 
"refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.245685] env[63371]: DEBUG nova.network.neutron [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Refreshing network info cache for port baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1835.246905] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:f8:9a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1835.256705] env[63371]: DEBUG oslo.service.loopingcall [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1835.257218] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1835.257737] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f19164d-047e-4287-8300-a40f4a5fe15b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.275807] env[63371]: DEBUG nova.compute.utils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1835.281467] env[63371]: DEBUG nova.compute.manager [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1835.281467] env[63371]: DEBUG nova.network.neutron [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1835.289744] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1835.289744] env[63371]: value = "task-1774880" [ 1835.289744] env[63371]: _type = "Task" [ 1835.289744] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.299115] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774880, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.356767] env[63371]: INFO nova.compute.manager [-] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Took 1.75 seconds to deallocate network for instance. [ 1835.364882] env[63371]: DEBUG nova.policy [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd08b55f9fa3a45b0a8672e955ee360c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ceecd2a995cf4da0b4218e371065ca0b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1835.425282] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1835.425462] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1835.559730] env[63371]: DEBUG oslo_vmware.api [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774879, 'name': PowerOnVM_Task, 'duration_secs': 0.788359} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.562362] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1835.562595] env[63371]: INFO nova.compute.manager [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Took 7.75 seconds to spawn the instance on the hypervisor. 
[ 1835.562774] env[63371]: DEBUG nova.compute.manager [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1835.563791] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eaf9cb2-9bdf-4330-ac32-b15adf339ee5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.576290] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-291abbcd-ef63-40f4-af43-c2881217f698 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.583762] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09c11be-dbea-4529-af54-f076ab046d75 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.601282] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a7c971-2472-30f6-0882-a2e6ebac2604, 'name': SearchDatastore_Task, 'duration_secs': 0.010924} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.626598] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.626930] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] a27ab7ba-481c-4292-a885-5dc8d8653d0b/a27ab7ba-481c-4292-a885-5dc8d8653d0b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1835.629055] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1a006a36-f324-415a-b311-81f23f56c63f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.631720] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8cbe3c9-2d24-4113-bfc6-7db69b1f1441 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.635265] env[63371]: DEBUG nova.compute.manager [req-2cc78b2f-ff35-407b-a796-3069426dfe94 req-f4bdb9d0-9d15-4241-92b2-c3efc9066289 service nova] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Received event network-vif-deleted-dd28ee6f-5efa-4009-842b-c1c9af10f8ea {{(pid=63371) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11132}} [ 1835.642997] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae61d05-e836-45f7-ab1a-3083111cd79d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.647176] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1835.647176] env[63371]: value = "task-1774881" [ 1835.647176] env[63371]: _type = "Task" [ 1835.647176] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.659540] env[63371]: DEBUG nova.compute.provider_tree [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1835.665647] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774881, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.780636] env[63371]: DEBUG nova.compute.manager [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1835.798076] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774880, 'name': CreateVM_Task, 'duration_secs': 0.353299} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1835.798299] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1835.799049] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.799189] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.799525] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1835.801010] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef86fdd8-e07a-49c3-a66b-f2c97aa49eba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.805206] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1835.805206] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]521c046d-f77b-d7ee-9421-ccb1e5b332b0" [ 1835.805206] env[63371]: _type = "Task" [ 1835.805206] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.817811] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521c046d-f77b-d7ee-9421-ccb1e5b332b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.832613] env[63371]: DEBUG nova.network.neutron [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Successfully created port: 5a88ea10-929b-41c9-b1b4-bf61377715c6 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1835.869496] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.083329] env[63371]: INFO nova.compute.manager [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Took 24.41 seconds to build instance. [ 1836.161584] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774881, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492283} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.161584] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] a27ab7ba-481c-4292-a885-5dc8d8653d0b/a27ab7ba-481c-4292-a885-5dc8d8653d0b.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1836.161584] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1836.161584] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ba175e8-ccfd-4ab7-a2c2-40af6200c92e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.163766] env[63371]: DEBUG nova.scheduler.client.report [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1836.172282] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1836.172282] env[63371]: value = "task-1774882" [ 1836.172282] env[63371]: _type = "Task" [ 1836.172282] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.182876] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774882, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.199789] env[63371]: DEBUG nova.network.neutron [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updated VIF entry in instance network info cache for port baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1836.200171] env[63371]: DEBUG nova.network.neutron [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updating instance_info_cache with network_info: [{"id": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "address": "fa:16:3e:38:f8:9a", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaee0e3e-86", "ovs_interfaceid": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.317846] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]521c046d-f77b-d7ee-9421-ccb1e5b332b0, 'name': SearchDatastore_Task, 'duration_secs': 0.075518} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.318013] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1836.318268] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1836.318519] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.318668] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.318847] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1836.319141] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a7a0a54-317e-43fa-92f0-80f947bb6880 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.328599] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1836.328721] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1836.330333] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01b7e15c-87c3-4dc6-b52c-b34638107aa3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.338155] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1836.338155] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d39ec1-181a-af93-82be-44ffab93bd47" [ 1836.338155] env[63371]: _type = "Task" [ 1836.338155] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.346540] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d39ec1-181a-af93-82be-44ffab93bd47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.434786] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 1836.435084] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.435266] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.435571] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.435805] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.435989] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.436106] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_power_states {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.586722] env[63371]: DEBUG oslo_concurrency.lockutils [None req-30685c84-b359-45fd-87c4-5a8c6b144508 tempest-AttachInterfacesTestJSON-796074902 
tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.924s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.672109] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.897s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.672489] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.679s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.685924] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774882, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068513} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.686528] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1836.687520] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970f7c19-db82-4312-819c-164970fb213e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.712514] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] a27ab7ba-481c-4292-a885-5dc8d8653d0b/a27ab7ba-481c-4292-a885-5dc8d8653d0b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1836.714835] env[63371]: INFO nova.scheduler.client.report [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted allocations for instance 407d1ef8-c5df-4277-b503-0d09cdaf8ef1 [ 1836.716378] env[63371]: DEBUG oslo_concurrency.lockutils [req-16bd1c17-8fcc-4f65-af13-1d8b031ffb75 req-58113a7a-21f2-448b-bdfa-90db041e6119 service nova] Releasing lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1836.716922] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3d9f490-2b15-4c24-a8ff-9722fb59bd4d 
{{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.743021] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1836.743021] env[63371]: value = "task-1774883" [ 1836.743021] env[63371]: _type = "Task" [ 1836.743021] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.755013] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774883, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.793055] env[63371]: DEBUG nova.compute.manager [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1836.830364] env[63371]: DEBUG nova.virt.hardware [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1836.830732] env[63371]: DEBUG nova.virt.hardware [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1836.831662] env[63371]: DEBUG nova.virt.hardware [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1836.831662] env[63371]: DEBUG nova.virt.hardware [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1836.831662] env[63371]: DEBUG nova.virt.hardware [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] 
Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1836.831662] env[63371]: DEBUG nova.virt.hardware [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1836.832557] env[63371]: DEBUG nova.virt.hardware [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1836.832557] env[63371]: DEBUG nova.virt.hardware [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1836.832557] env[63371]: DEBUG nova.virt.hardware [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1836.832760] env[63371]: DEBUG nova.virt.hardware [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1836.833562] env[63371]: DEBUG nova.virt.hardware [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1836.834320] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13f5e06-aac6-4c0e-a0f4-d7c68c802f67 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.849191] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d39ec1-181a-af93-82be-44ffab93bd47, 'name': SearchDatastore_Task, 'duration_secs': 0.008641} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.851876] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-327b9486-9e7d-4ac3-a699-3d94bad5de35 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.855354] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26dcf238-556a-40a5-9433-1392e25a8744 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.863434] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1836.863434] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52949cbe-390c-a4d0-b53d-70bfda35ab24" [ 1836.863434] env[63371]: _type = "Task" [ 1836.863434] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.881476] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52949cbe-390c-a4d0-b53d-70bfda35ab24, 'name': SearchDatastore_Task, 'duration_secs': 0.010171} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.881919] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1836.882246] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] ff724a9f-5e9a-4683-8eb3-058fb3639ea5/ff724a9f-5e9a-4683-8eb3-058fb3639ea5.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1836.882532] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14ad888a-8ac4-4b6d-929e-3a9b6853353f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.889747] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1836.889747] env[63371]: value = "task-1774884" [ 1836.889747] env[63371]: _type = "Task" [ 1836.889747] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.898627] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774884, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.945128] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Getting list of instances from cluster (obj){ [ 1836.945128] env[63371]: value = "domain-c8" [ 1836.945128] env[63371]: _type = "ClusterComputeResource" [ 1836.945128] env[63371]: } {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1836.946512] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f4ab3b-eecb-4bf2-b97e-28c9934131c7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.969687] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Got total of 12 instances {{(pid=63371) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1836.969930] env[63371]: WARNING nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] While synchronizing instance power states, found 16 instances in the database and 12 instances on the hypervisor. [ 1836.970137] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.970395] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid 6df9af10-0053-4696-920a-10ab2af67ef5 {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.970616] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid 0c9156ea-81c4-4286-a20b-66068a5bce59 {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.970840] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid da4839fa-8597-411c-b30c-0ac9226fec1f {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.971078] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7 {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.971311] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid 0518c5a8-8cc1-4829-a0cf-5f5904f6df86 {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.971554] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid 1ec21edd-7b7c-4a2b-983f-8aa6c022e033 {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.971811] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid 9885de9e-c640-4d82-a47a-980988d89deb 
{{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.971963] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.972177] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid 407d1ef8-c5df-4277-b503-0d09cdaf8ef1 {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.972389] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid da2e3b05-9cb0-49bb-8945-924e48cf3431 {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.972594] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid 1689fc63-3c07-4517-bbef-0011d860e9fc {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.972799] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.973063] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid a27ab7ba-481c-4292-a885-5dc8d8653d0b {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.973250] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid ff724a9f-5e9a-4683-8eb3-058fb3639ea5 {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.973467] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Triggering sync for uuid 05f6f94a-c9c4-4737-8b07-77e9c2093497 {{(pid=63371) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10339}} [ 1836.973952] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.974274] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.974608] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "6df9af10-0053-4696-920a-10ab2af67ef5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.974860] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "6df9af10-0053-4696-920a-10ab2af67ef5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: 
waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.976040] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "0c9156ea-81c4-4286-a20b-66068a5bce59" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.976438] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "0c9156ea-81c4-4286-a20b-66068a5bce59" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.976711] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "da4839fa-8597-411c-b30c-0ac9226fec1f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.976935] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.978350] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.978786] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.002s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.979118] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.979546] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.980183] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "9885de9e-c640-4d82-a47a-980988d89deb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.980445] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "9885de9e-c640-4d82-a47a-980988d89deb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.981400] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.981671] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.982203] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.983026] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.983410] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.983769] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "1689fc63-3c07-4517-bbef-0011d860e9fc" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.984337] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.984636] env[63371]: INFO nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] During sync_power_state the instance has a pending task (resize_prep). Skip. 
[ 1836.984861] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.985148] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.985679] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1836.986973] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.987256] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.987487] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.987685] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1836.987832] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1836.989081] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb54c2fb-a149-4849-abca-1776a167c8cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.994615] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4076c8a6-fdca-470b-a7df-566e8695569a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.998168] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b30a8e-2433-475e-bc6b-57995e4f0342 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.001365] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3605d473-8167-44af-b208-299f37e0f0ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.004457] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e4c34c-590a-461f-864d-2de168f6c4ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.007287] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce0fbf3-5eb4-471f-ae0c-e4c98e15a1f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.012933] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05c53ce-d2c2-4722-a6e4-f949c7ca8138 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.016282] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f06fdf6-cefc-4763-9aa6-3adc4ebe5915 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.021255] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c8e657-0228-4da9-bec4-dee3d41a7ec6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.023314] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1837.060059] env[63371]: WARNING oslo_messaging._drivers.amqpdriver [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 1837.179148] env[63371]: INFO nova.compute.claims [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1837.249364] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ee374c8-88ad-49e5-b9cb-e9751961c055 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.239s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.250607] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.268s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.251463] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4158fad-ad07-4981-82dc-296d546296f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.261376] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774883, 'name': ReconfigVM_Task, 'duration_secs': 0.296473} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.261376] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Reconfigured VM instance instance-0000006e to attach disk [datastore1] a27ab7ba-481c-4292-a885-5dc8d8653d0b/a27ab7ba-481c-4292-a885-5dc8d8653d0b.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1837.261376] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-002cb08d-7f10-4a6c-adf2-f1b790853b91 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.270859] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aff7a50-0ac8-4088-a88a-d5d3186dde8e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.285092] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1837.285092] env[63371]: value = "task-1774885" [ 1837.285092] env[63371]: _type = "Task" [ 1837.285092] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.296035] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774885, 'name': Rename_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.401931] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774884, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.529912] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.561959] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "9885de9e-c640-4d82-a47a-980988d89deb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.581s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.562386] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.581s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.562684] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "0c9156ea-81c4-4286-a20b-66068a5bce59" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.586s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.562982] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.589s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.567916] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.588s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.568297] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.583s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.573936] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "6df9af10-0053-4696-920a-10ab2af67ef5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.599s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.574320] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.596s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.574636] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.591s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.687108] env[63371]: INFO nova.compute.resource_tracker [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating resource usage from migration 67d1974a-6979-4f3e-959c-350c3b20701a [ 1837.719643] env[63371]: DEBUG nova.compute.manager [req-91792c66-ea08-40e9-8f03-452cdf0b4dff req-37fe50a0-2493-4526-ba6e-a309c2bf9993 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Received event network-changed-39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1837.719802] env[63371]: DEBUG nova.compute.manager [req-91792c66-ea08-40e9-8f03-452cdf0b4dff req-37fe50a0-2493-4526-ba6e-a309c2bf9993 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Refreshing instance network info cache due to event network-changed-39fe8c75-7aaa-42da-a231-9c68310ef7c8. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1837.720824] env[63371]: DEBUG oslo_concurrency.lockutils [req-91792c66-ea08-40e9-8f03-452cdf0b4dff req-37fe50a0-2493-4526-ba6e-a309c2bf9993 service nova] Acquiring lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1837.720824] env[63371]: DEBUG oslo_concurrency.lockutils [req-91792c66-ea08-40e9-8f03-452cdf0b4dff req-37fe50a0-2493-4526-ba6e-a309c2bf9993 service nova] Acquired lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1837.720824] env[63371]: DEBUG nova.network.neutron [req-91792c66-ea08-40e9-8f03-452cdf0b4dff req-37fe50a0-2493-4526-ba6e-a309c2bf9993 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Refreshing network info cache for port 39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1837.804091] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1837.804342] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1837.806121] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774885, 'name': Rename_Task, 'duration_secs': 0.332164} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.809143] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1837.809664] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4046934-b702-4b59-9770-405aa351ea23 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.817287] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "407d1ef8-c5df-4277-b503-0d09cdaf8ef1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.567s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1837.817575] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1837.817575] env[63371]: value = "task-1774886" [ 1837.817575] env[63371]: _type = "Task" [ 1837.817575] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.828180] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774886, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.833491] env[63371]: DEBUG nova.network.neutron [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Successfully updated port: 5a88ea10-929b-41c9-b1b4-bf61377715c6 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1837.904465] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774884, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.548289} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.904465] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] ff724a9f-5e9a-4683-8eb3-058fb3639ea5/ff724a9f-5e9a-4683-8eb3-058fb3639ea5.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1837.905220] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1837.905220] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-82ad1d8b-99f0-400d-ad36-f6a40a3a9024 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.913701] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1837.913701] env[63371]: value = "task-1774887" [ 1837.913701] env[63371]: _type = "Task" [ 1837.913701] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.927066] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774887, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.990893] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c773b81c-a564-49cd-9c2b-e1b55b718025 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.000578] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030a8efc-7ecf-4321-8414-6c5998903ef1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.056388] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea551e3e-5856-415a-a5ca-14a05c8c0ee2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.067375] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-088f8d56-f1a6-422a-8982-3b98f414ea6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.084433] env[63371]: DEBUG nova.compute.provider_tree [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1838.310189] env[63371]: DEBUG nova.compute.manager [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1838.339563] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1838.339788] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1838.340024] env[63371]: DEBUG nova.network.neutron [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1838.355559] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774886, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.434820] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774887, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068339} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.438989] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1838.439906] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7b9d299-9fc0-4f16-aa9f-aef82a7c350b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.471238] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] ff724a9f-5e9a-4683-8eb3-058fb3639ea5/ff724a9f-5e9a-4683-8eb3-058fb3639ea5.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1838.471828] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f90b368a-8817-42b4-b192-e06e67e617d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.494087] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1838.494087] env[63371]: value = "task-1774888" [ 1838.494087] env[63371]: _type = "Task" [ 1838.494087] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.503284] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774888, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.590630] env[63371]: DEBUG nova.scheduler.client.report [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1838.607883] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "9885de9e-c640-4d82-a47a-980988d89deb" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.607883] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "9885de9e-c640-4d82-a47a-980988d89deb" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.658090] env[63371]: DEBUG nova.network.neutron [req-91792c66-ea08-40e9-8f03-452cdf0b4dff req-37fe50a0-2493-4526-ba6e-a309c2bf9993 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updated VIF entry in instance network info cache for port 39fe8c75-7aaa-42da-a231-9c68310ef7c8. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1838.658490] env[63371]: DEBUG nova.network.neutron [req-91792c66-ea08-40e9-8f03-452cdf0b4dff req-37fe50a0-2493-4526-ba6e-a309c2bf9993 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updating instance_info_cache with network_info: [{"id": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "address": "fa:16:3e:7a:b8:42", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39fe8c75-7a", "ovs_interfaceid": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1838.711953] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.711953] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.832597] env[63371]: DEBUG oslo_vmware.api [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774886, 'name': PowerOnVM_Task, 'duration_secs': 0.616572} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1838.833683] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1838.833683] env[63371]: INFO nova.compute.manager [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Took 8.65 seconds to spawn the instance on the hypervisor. [ 1838.833683] env[63371]: DEBUG nova.compute.manager [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1838.834483] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.835230] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e044c580-a1f7-4db4-91b5-182d52492243 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.882153] env[63371]: DEBUG nova.network.neutron [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1839.003808] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774888, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.051265] env[63371]: DEBUG nova.network.neutron [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance_info_cache with network_info: [{"id": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "address": "fa:16:3e:b6:f8:d2", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a88ea10-92", "ovs_interfaceid": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1839.099643] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.425s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.099643] env[63371]: INFO nova.compute.manager [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Migrating [ 1839.104332] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.104s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.104682] env[63371]: DEBUG nova.objects.instance [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lazy-loading 'resources' on Instance uuid da4839fa-8597-411c-b30c-0ac9226fec1f {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1839.114195] env[63371]: DEBUG nova.compute.utils [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1839.161442] env[63371]: DEBUG oslo_concurrency.lockutils 
[req-91792c66-ea08-40e9-8f03-452cdf0b4dff req-37fe50a0-2493-4526-ba6e-a309c2bf9993 service nova] Releasing lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.214815] env[63371]: DEBUG nova.compute.manager [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1839.356384] env[63371]: INFO nova.compute.manager [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Took 26.93 seconds to build instance. [ 1839.505013] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774888, 'name': ReconfigVM_Task, 'duration_secs': 0.9528} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.505620] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Reconfigured VM instance instance-0000006f to attach disk [datastore1] ff724a9f-5e9a-4683-8eb3-058fb3639ea5/ff724a9f-5e9a-4683-8eb3-058fb3639ea5.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1839.506286] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-061ead77-153e-4717-a17c-a74998e5e49e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.513238] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1839.513238] env[63371]: value = "task-1774889" [ 1839.513238] env[63371]: _type = "Task" [ 1839.513238] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.523459] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774889, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.554235] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.554614] env[63371]: DEBUG nova.compute.manager [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Instance network_info: |[{"id": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "address": "fa:16:3e:b6:f8:d2", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a88ea10-92", "ovs_interfaceid": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1839.555053] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:f8:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3a0ddd7d-c321-4187-bdd8-b19044ea2c4a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5a88ea10-929b-41c9-b1b4-bf61377715c6', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1839.563593] env[63371]: DEBUG oslo.service.loopingcall [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1839.563903] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1839.564220] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-404a66b8-4630-400a-8516-0d15cf01982f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.584551] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1839.584551] env[63371]: value = "task-1774890" [ 1839.584551] env[63371]: _type = "Task" [ 1839.584551] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.592673] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774890, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.617271] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1839.617668] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.617765] env[63371]: DEBUG nova.network.neutron [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1839.620812] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "9885de9e-c640-4d82-a47a-980988d89deb" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.740598] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.860925] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6adba6a1-31a9-4967-a88a-fa1a7a1acd96 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.459s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.861668] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.875s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.861955] env[63371]: INFO nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] During sync_power_state the instance has a pending task (spawning). Skip. [ 1839.862369] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.926246] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901a1fef-4e13-4987-a237-8ba815148f92 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.934412] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdb667aa-7be6-486e-95fe-41b5e45a3ab4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.971589] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6949de14-c20c-4faf-88d0-8dc517b2ecc0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.976168] env[63371]: DEBUG nova.compute.manager [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Received event network-vif-plugged-5a88ea10-929b-41c9-b1b4-bf61377715c6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1839.976464] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] Acquiring lock "05f6f94a-c9c4-4737-8b07-77e9c2093497-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1839.976712] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1839.977055] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1839.977422] env[63371]: DEBUG 
nova.compute.manager [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] No waiting events found dispatching network-vif-plugged-5a88ea10-929b-41c9-b1b4-bf61377715c6 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1839.977682] env[63371]: WARNING nova.compute.manager [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Received unexpected event network-vif-plugged-5a88ea10-929b-41c9-b1b4-bf61377715c6 for instance with vm_state building and task_state spawning. [ 1839.977826] env[63371]: DEBUG nova.compute.manager [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Received event network-changed-5a88ea10-929b-41c9-b1b4-bf61377715c6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1839.978019] env[63371]: DEBUG nova.compute.manager [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Refreshing instance network info cache due to event network-changed-5a88ea10-929b-41c9-b1b4-bf61377715c6. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1839.978249] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] Acquiring lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1839.978477] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] Acquired lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1839.978575] env[63371]: DEBUG nova.network.neutron [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Refreshing network info cache for port 5a88ea10-929b-41c9-b1b4-bf61377715c6 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1839.991441] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215b7153-5211-4bfd-9f7c-a32be21861c8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.006285] env[63371]: DEBUG nova.compute.provider_tree [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1840.022183] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774889, 'name': Rename_Task, 'duration_secs': 0.303147} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.022707] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1840.022952] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d5db07ea-c339-4a71-b1db-4c15e3ceeab8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.030979] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1840.030979] env[63371]: value = "task-1774891" [ 1840.030979] env[63371]: _type = "Task" [ 1840.030979] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.040172] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774891, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.095377] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774890, 'name': CreateVM_Task, 'duration_secs': 0.353558} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.095445] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1840.096242] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.096446] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.096892] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1840.097191] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ccb6c30-1dae-4d62-b2df-007f1b31cb4c {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.102251] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1840.102251] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52824744-b8ae-318a-0107-d5d077ccf868" [ 1840.102251] env[63371]: _type = "Task" [ 1840.102251] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.113810] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52824744-b8ae-318a-0107-d5d077ccf868, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.443313] env[63371]: DEBUG nova.network.neutron [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance_info_cache with network_info: [{"id": "fdb2262d-54b0-4555-939f-39915c982e09", "address": "fa:16:3e:0d:42:9d", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdb2262d-54", "ovs_interfaceid": "fdb2262d-54b0-4555-939f-39915c982e09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.510281] env[63371]: DEBUG nova.scheduler.client.report [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1840.542983] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 
tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774891, 'name': PowerOnVM_Task} progress is 76%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.618055] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52824744-b8ae-318a-0107-d5d077ccf868, 'name': SearchDatastore_Task, 'duration_secs': 0.012994} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.618055] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1840.618055] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1840.618055] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1840.618055] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1840.618055] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1840.618055] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85842631-9f05-4e2b-a52d-da48d92c931e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.633868] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1840.633868] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 
tempest-ServerActionsTestOtherB-610614522-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1840.633868] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6c0c088-3119-441c-a8a1-157e6bf75ffa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.642837] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1840.642837] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]527802f4-2b69-3ea3-b8ed-412e02478f8a" [ 1840.642837] env[63371]: _type = "Task" [ 1840.642837] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.652693] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527802f4-2b69-3ea3-b8ed-412e02478f8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.695822] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "9885de9e-c640-4d82-a47a-980988d89deb" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.695822] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "9885de9e-c640-4d82-a47a-980988d89deb" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.695822] env[63371]: INFO nova.compute.manager [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Attaching volume 74095c08-847f-4b4a-b107-0d7acbea84a7 to /dev/sdb [ 1840.730436] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b93d97-b6b1-408b-acb2-c036b31c311a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.739100] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac409118-b177-409b-96a3-889b7d270ec9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.753388] env[63371]: DEBUG nova.virt.block_device [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Updating existing volume attachment record: c6ac2568-e8d7-409d-81e7-a4030f556ca0 
{{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1840.862068] env[63371]: DEBUG nova.network.neutron [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updated VIF entry in instance network info cache for port 5a88ea10-929b-41c9-b1b4-bf61377715c6. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1840.862422] env[63371]: DEBUG nova.network.neutron [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance_info_cache with network_info: [{"id": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "address": "fa:16:3e:b6:f8:d2", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a88ea10-92", "ovs_interfaceid": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1840.945965] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1841.016667] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.912s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.021272] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.152s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.021411] env[63371]: DEBUG nova.objects.instance [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Lazy-loading 'resources' on Instance uuid 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7 {{(pid=63371) 
obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1841.044013] env[63371]: DEBUG oslo_vmware.api [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774891, 'name': PowerOnVM_Task, 'duration_secs': 0.978109} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.045155] env[63371]: INFO nova.scheduler.client.report [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Deleted allocations for instance da4839fa-8597-411c-b30c-0ac9226fec1f [ 1841.046122] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1841.046373] env[63371]: INFO nova.compute.manager [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Took 8.50 seconds to spawn the instance on the hypervisor. [ 1841.046584] env[63371]: DEBUG nova.compute.manager [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1841.050701] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced10f1f-b8e5-4bda-971a-d78f070bedb8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.155422] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527802f4-2b69-3ea3-b8ed-412e02478f8a, 'name': SearchDatastore_Task, 'duration_secs': 0.025769} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.156262] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5d10296-e4b2-4aac-976a-e6a8c6d00ce1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.161794] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1841.161794] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]527517bd-1dbb-75b7-7400-4a5a40cfde3d" [ 1841.161794] env[63371]: _type = "Task" [ 1841.161794] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.169916] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527517bd-1dbb-75b7-7400-4a5a40cfde3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.365961] env[63371]: DEBUG oslo_concurrency.lockutils [req-3a75c4ec-e2ed-4ce3-9a26-f559325b2914 req-f10cc8cc-eaa4-45f6-9fea-368076430a39 service nova] Releasing lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1841.560261] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9e6856b0-d9a8-4fa8-a39d-572364f4a629 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.939s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.561282] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.584s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.565632] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1cc3d8d-3da1-4404-b6c8-f66d4daa6868 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.572152] env[63371]: INFO nova.compute.manager [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Took 16.93 seconds to build instance. [ 1841.577139] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86905b1e-1344-44bd-867e-867a93aefe7e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.671782] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527517bd-1dbb-75b7-7400-4a5a40cfde3d, 'name': SearchDatastore_Task, 'duration_secs': 0.010167} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.674444] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1841.674719] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 05f6f94a-c9c4-4737-8b07-77e9c2093497/05f6f94a-c9c4-4737-8b07-77e9c2093497.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1841.675765] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34325773-76ea-4c15-b2a5-45888be64c04 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.681465] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1841.681465] env[63371]: value = "task-1774895" [ 1841.681465] env[63371]: _type = "Task" [ 1841.681465] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.691290] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774895, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.777221] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c709a7c-2152-49fb-80f8-3d22b2044198 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.785693] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4979da12-e9c7-4823-a296-9daf54f9bcd7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.818997] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88302c5-4265-49ed-9085-e726388c1613 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.827469] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6cd9d7-3490-420a-ad05-b34bdbbf9eca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.842437] env[63371]: DEBUG nova.compute.provider_tree [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1842.075427] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b0b55e47-5ee9-4597-b613-e7924be5267b tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.443s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.075739] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.088s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.075934] env[63371]: INFO nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] During sync_power_state the instance has a pending task (spawning). Skip. 
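The "Invoking <operation>_Task with opID=oslo.vmware-..." entries, the "Waiting for the task: (returnval){ ... }" blocks, and the "progress is N% / completed successfully" lines that dominate this part of the log all come from oslo.vmware's task-handling path (request_handler in oslo_vmware/service.py, wait_for_task and _poll_task in oslo_vmware/api.py). A minimal sketch of that calling pattern, assuming an already-established VMwareAPISession object named session and an illustrative VM managed-object reference vm_ref (neither name is taken from this log):

    # Python sketch, illustrative only: session and vm_ref are assumed to exist.
    # invoke_api() issues the SOAP request that appears in the log as
    # "Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-...",
    # and wait_for_task() polls the returned task (the _poll_task loop),
    # producing the "Waiting for the task ... progress is N% ...
    # completed successfully" lines seen above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

Similarly, the paired "Acquiring lock ... by ..." / "Lock ... acquired ... :: waited" / "released ... :: held" entries are emitted by the inner wrapper in oslo_concurrency.lockutils (lockutils.py:402/407/421) whenever Nova wraps a critical section such as do_attach_volume with a lockutils-synchronized lock.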
[ 1842.076123] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.122712] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "da4839fa-8597-411c-b30c-0ac9226fec1f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.561s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.191701] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774895, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.345965] env[63371]: DEBUG nova.scheduler.client.report [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1842.461133] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff87530-589a-4513-8223-b37a83567122 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.480365] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance '1689fc63-3c07-4517-bbef-0011d860e9fc' progress to 0 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1842.629195] env[63371]: DEBUG nova.compute.manager [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1842.630149] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-969c264a-98a7-488f-980c-57532c43ae81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.695396] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774895, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600062} completed 
successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.695674] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 05f6f94a-c9c4-4737-8b07-77e9c2093497/05f6f94a-c9c4-4737-8b07-77e9c2093497.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1842.695901] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1842.696181] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2bbd4afb-235b-4122-832a-e5c2a758a379 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.704501] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1842.704501] env[63371]: value = "task-1774896" [ 1842.704501] env[63371]: _type = "Task" [ 1842.704501] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.713527] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774896, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.850769] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.830s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.853050] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.323s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.853662] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1842.853662] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1842.853993] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.020s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1842.856758] env[63371]: INFO nova.compute.claims [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1842.859345] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baa03449-14f8-4917-ac9d-f1c833d1045c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.870913] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b799b81b-5898-4787-a311-3e3c10d4ae8a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.874158] env[63371]: INFO nova.scheduler.client.report [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Deleted allocations for instance 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7 [ 1842.889037] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45307b6d-9a97-4ecb-ba40-70dc0fdd9166 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.896971] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-57da106e-064d-448e-adba-beca65ec7cb2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.926364] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179709MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1842.926526] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1842.986191] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1842.986500] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6adbd54d-249e-4d66-b921-703d75821378 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.995346] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1842.995346] env[63371]: value = "task-1774897" [ 1842.995346] env[63371]: _type = "Task" [ 1842.995346] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.005511] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774897, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.143223] env[63371]: INFO nova.compute.manager [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] instance snapshotting [ 1843.146752] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18debb4-9ca6-4102-b37f-453a51bd3d10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.171976] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aca4fbb1-3a45-46f7-995a-4294c07f3eb4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.215850] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774896, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076192} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.217595] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1843.218814] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a77e2ae-5d94-4aa6-b8ff-8622d40983b2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.225270] env[63371]: DEBUG nova.compute.manager [req-7add94c2-f573-4647-aa0f-4b22a6420e3d req-89c85ae1-83ed-4880-b893-ac857304c6a6 service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Received event network-changed-baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1843.225560] env[63371]: DEBUG nova.compute.manager [req-7add94c2-f573-4647-aa0f-4b22a6420e3d req-89c85ae1-83ed-4880-b893-ac857304c6a6 service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Refreshing instance network info cache due to event network-changed-baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1843.225786] env[63371]: DEBUG oslo_concurrency.lockutils [req-7add94c2-f573-4647-aa0f-4b22a6420e3d req-89c85ae1-83ed-4880-b893-ac857304c6a6 service nova] Acquiring lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.225941] env[63371]: DEBUG oslo_concurrency.lockutils [req-7add94c2-f573-4647-aa0f-4b22a6420e3d req-89c85ae1-83ed-4880-b893-ac857304c6a6 service nova] Acquired lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.226118] env[63371]: DEBUG nova.network.neutron [req-7add94c2-f573-4647-aa0f-4b22a6420e3d req-89c85ae1-83ed-4880-b893-ac857304c6a6 service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Refreshing network info cache for port baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1843.251159] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 05f6f94a-c9c4-4737-8b07-77e9c2093497/05f6f94a-c9c4-4737-8b07-77e9c2093497.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1843.252240] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0639c4d-6d6b-4a76-8558-9cfd86b5cf29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.273140] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1843.273140] 
env[63371]: value = "task-1774898" [ 1843.273140] env[63371]: _type = "Task" [ 1843.273140] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.281892] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774898, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.383622] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e660cf9c-9bd9-45f5-94f7-99d0963b7163 tempest-AttachInterfacesUnderV243Test-1340138091 tempest-AttachInterfacesUnderV243Test-1340138091-project-member] Lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.082s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.385200] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 6.408s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.385722] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5775a647-7571-48f1-90d8-0b3a58665544 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.395283] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6272323b-f5b3-401e-9592-bd4021942266 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.507154] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774897, 'name': PowerOffVM_Task, 'duration_secs': 0.428628} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.507790] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1843.507994] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance '1689fc63-3c07-4517-bbef-0011d860e9fc' progress to 17 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1843.644251] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "cfa04c51-c077-4f16-ae57-e54d62aac044" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1843.644647] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.683599] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1843.683914] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-af73db6c-ddef-49ba-bb2b-6011601aa98e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.691744] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1843.691744] env[63371]: value = "task-1774900" [ 1843.691744] env[63371]: _type = "Task" [ 1843.691744] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.701983] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774900, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.782869] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774898, 'name': ReconfigVM_Task, 'duration_secs': 0.295661} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.783121] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 05f6f94a-c9c4-4737-8b07-77e9c2093497/05f6f94a-c9c4-4737-8b07-77e9c2093497.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1843.783890] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31ba9892-97b2-4a0e-b9a0-2147187ed6b9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.789720] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1843.789720] env[63371]: value = "task-1774901" [ 1843.789720] env[63371]: _type = "Task" [ 1843.789720] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.798285] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774901, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.932312] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "3b2ed231-9f9c-4d28-9c81-034c2d17c9a7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.547s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.986224] env[63371]: DEBUG nova.network.neutron [req-7add94c2-f573-4647-aa0f-4b22a6420e3d req-89c85ae1-83ed-4880-b893-ac857304c6a6 service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updated VIF entry in instance network info cache for port baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1843.986579] env[63371]: DEBUG nova.network.neutron [req-7add94c2-f573-4647-aa0f-4b22a6420e3d req-89c85ae1-83ed-4880-b893-ac857304c6a6 service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updating instance_info_cache with network_info: [{"id": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "address": "fa:16:3e:38:f8:9a", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaee0e3e-86", "ovs_interfaceid": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1844.014430] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1844.014671] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1844.014958] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1844.015047] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1844.015174] env[63371]: DEBUG 
nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1844.015307] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1844.015512] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1844.015665] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1844.015948] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1844.016043] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1844.016203] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1844.023894] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5597a634-eef8-4936-ae04-edd701084411 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.046015] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1844.046015] env[63371]: value = "task-1774902" [ 1844.046015] env[63371]: _type = "Task" [ 1844.046015] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.055724] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774902, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.139214] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4092b5-44d8-42d3-815f-32249bfb6574 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.147241] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d36706-cd01-47e6-ae5a-af9ee006f160 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.153543] env[63371]: DEBUG nova.compute.manager [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1844.185789] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8967d2cb-2f50-41d4-995d-9f49ecd5a806 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.200217] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-899ecb78-5c5d-444f-8934-f2e0a6bf3847 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.210654] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774900, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.219458] env[63371]: DEBUG nova.compute.provider_tree [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1844.305559] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774901, 'name': Rename_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.490286] env[63371]: DEBUG oslo_concurrency.lockutils [req-7add94c2-f573-4647-aa0f-4b22a6420e3d req-89c85ae1-83ed-4880-b893-ac857304c6a6 service nova] Releasing lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.556788] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774902, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.671855] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.706323] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774900, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.745675] env[63371]: ERROR nova.scheduler.client.report [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [req-f085d543-2690-4405-aa4d-6fe7eb53d2fd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f085d543-2690-4405-aa4d-6fe7eb53d2fd"}]} [ 1844.762552] env[63371]: DEBUG nova.scheduler.client.report [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1844.780698] env[63371]: DEBUG nova.scheduler.client.report [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1844.781089] env[63371]: DEBUG nova.compute.provider_tree [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1844.796927] env[63371]: DEBUG nova.scheduler.client.report [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1844.807519] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774901, 'name': Rename_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.822628] env[63371]: DEBUG nova.scheduler.client.report [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1845.060114] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774902, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.079999] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5a8f2a4-c81c-4fa2-8de6-f79c586d6285 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.088216] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbbd62b-89c2-4dcc-8ed4-a6e14a53fa67 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.839886] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Volume attach. 
Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1845.840258] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368492', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'name': 'volume-74095c08-847f-4b4a-b107-0d7acbea84a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9885de9e-c640-4d82-a47a-980988d89deb', 'attached_at': '', 'detached_at': '', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'serial': '74095c08-847f-4b4a-b107-0d7acbea84a7'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1845.848803] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc522d2-d531-4cd3-af3b-cf38397afc4e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.851431] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0097b7-17a0-473d-9b1c-fbe6de5f63a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.861140] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774900, 'name': CreateSnapshot_Task, 'duration_secs': 1.255485} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.876977] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c04ce5-f58d-49e7-841d-9ac3774cc928 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.880608] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1845.880900] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774902, 'name': ReconfigVM_Task, 'duration_secs': 1.20842} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.881121] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774901, 'name': Rename_Task, 'duration_secs': 1.134303} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.881882] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c04e0a9-e520-4852-ba33-5f3c02d4d127 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.884331] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance '1689fc63-3c07-4517-bbef-0011d860e9fc' progress to 33 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1845.887603] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1845.888246] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22efc040-ee5e-4fb0-99a3-6aa1c829316b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.890913] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e76692c-7fcd-423f-96e5-a367c4d08a4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.900483] env[63371]: DEBUG nova.compute.provider_tree [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1845.919991] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1845.919991] env[63371]: value = "task-1774903" [ 1845.919991] env[63371]: _type = "Task" [ 1845.919991] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.927480] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] volume-74095c08-847f-4b4a-b107-0d7acbea84a7/volume-74095c08-847f-4b4a-b107-0d7acbea84a7.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1845.928811] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-98fb9588-4c91-4af8-9e79-e1fde2c5514f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.951468] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774903, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.953159] env[63371]: DEBUG oslo_vmware.api [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1845.953159] env[63371]: value = "task-1774904" [ 1845.953159] env[63371]: _type = "Task" [ 1845.953159] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.961842] env[63371]: DEBUG oslo_vmware.api [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774904, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.393929] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1846.394232] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1846.394555] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1846.394740] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1846.394934] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1846.395137] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1846.395390] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1846.395585] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1846.395824] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Got 1 possible 
topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1846.396066] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1846.396290] env[63371]: DEBUG nova.virt.hardware [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1846.403630] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Reconfiguring VM instance instance-0000006c to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1846.403987] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-355f5969-5448-4afa-a059-c36f71db1397 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.424491] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1846.424491] env[63371]: value = "task-1774905" [ 1846.424491] env[63371]: _type = "Task" [ 1846.424491] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.440143] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1846.441820] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774905, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.441820] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7b43186e-1f50-4f9f-aa5b-2b8c26962897 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.452933] env[63371]: DEBUG oslo_vmware.api [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1774903, 'name': PowerOnVM_Task, 'duration_secs': 0.483734} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.454318] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1846.454548] env[63371]: INFO nova.compute.manager [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Took 9.66 seconds to spawn the instance on the hypervisor. [ 1846.454723] env[63371]: DEBUG nova.compute.manager [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1846.455114] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1846.455114] env[63371]: value = "task-1774906" [ 1846.455114] env[63371]: _type = "Task" [ 1846.455114] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.456152] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c14ad7f1-8eed-4fbc-878c-81cc911ac27c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.464364] env[63371]: DEBUG nova.scheduler.client.report [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 162 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1846.464632] env[63371]: DEBUG nova.compute.provider_tree [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 162 to 163 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1846.464806] env[63371]: DEBUG nova.compute.provider_tree [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 
512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1846.489371] env[63371]: DEBUG oslo_vmware.api [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774904, 'name': ReconfigVM_Task, 'duration_secs': 0.369851} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.489656] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774906, 'name': CloneVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.490164] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Reconfigured VM instance instance-00000067 to attach disk [datastore1] volume-74095c08-847f-4b4a-b107-0d7acbea84a7/volume-74095c08-847f-4b4a-b107-0d7acbea84a7.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1846.495337] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43a54656-cb23-45a0-85e9-6389c3d930c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.511488] env[63371]: DEBUG oslo_vmware.api [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1846.511488] env[63371]: value = "task-1774907" [ 1846.511488] env[63371]: _type = "Task" [ 1846.511488] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.520816] env[63371]: DEBUG oslo_vmware.api [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774907, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.935543] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774905, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.971865] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.118s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.972523] env[63371]: DEBUG nova.compute.manager [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1846.975435] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774906, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.976065] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.236s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.977696] env[63371]: INFO nova.compute.claims [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1846.999205] env[63371]: INFO nova.compute.manager [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Took 20.37 seconds to build instance. [ 1847.021493] env[63371]: DEBUG oslo_vmware.api [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774907, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.212326] env[63371]: DEBUG nova.compute.manager [req-e0452926-9da4-4c8b-98d8-412311e6a140 req-41658861-d342-4b63-9d7b-c070bf32e0ff service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Received event network-changed-5a88ea10-929b-41c9-b1b4-bf61377715c6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1847.212531] env[63371]: DEBUG nova.compute.manager [req-e0452926-9da4-4c8b-98d8-412311e6a140 req-41658861-d342-4b63-9d7b-c070bf32e0ff service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Refreshing instance network info cache due to event network-changed-5a88ea10-929b-41c9-b1b4-bf61377715c6. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1847.212750] env[63371]: DEBUG oslo_concurrency.lockutils [req-e0452926-9da4-4c8b-98d8-412311e6a140 req-41658861-d342-4b63-9d7b-c070bf32e0ff service nova] Acquiring lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1847.213008] env[63371]: DEBUG oslo_concurrency.lockutils [req-e0452926-9da4-4c8b-98d8-412311e6a140 req-41658861-d342-4b63-9d7b-c070bf32e0ff service nova] Acquired lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1847.213084] env[63371]: DEBUG nova.network.neutron [req-e0452926-9da4-4c8b-98d8-412311e6a140 req-41658861-d342-4b63-9d7b-c070bf32e0ff service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Refreshing network info cache for port 5a88ea10-929b-41c9-b1b4-bf61377715c6 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1847.435942] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774905, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.471330] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774906, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.477161] env[63371]: DEBUG nova.compute.utils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1847.478466] env[63371]: DEBUG nova.compute.manager [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1847.478635] env[63371]: DEBUG nova.network.neutron [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1847.501485] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8ebc9d16-43e3-4e46-ab32-142cac9cdb31 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.883s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.502092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 10.514s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.502293] env[63371]: INFO nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] During sync_power_state the instance has a pending task (spawning). Skip. [ 1847.502479] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.522590] env[63371]: DEBUG oslo_vmware.api [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774907, 'name': ReconfigVM_Task, 'duration_secs': 0.918936} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.524127] env[63371]: DEBUG nova.policy [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aacd81490704110b6cc6aba338883a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a5897667b6b47deb7ff5b64f9499f36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1847.525832] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368492', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'name': 'volume-74095c08-847f-4b4a-b107-0d7acbea84a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9885de9e-c640-4d82-a47a-980988d89deb', 'attached_at': '', 'detached_at': '', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'serial': '74095c08-847f-4b4a-b107-0d7acbea84a7'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1847.939216] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774905, 'name': ReconfigVM_Task, 'duration_secs': 1.013408} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.942158] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Reconfigured VM instance instance-0000006c to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1847.944863] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1a7766-d91a-4241-9901-6f8f2c3b5a74 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.946786] env[63371]: DEBUG nova.network.neutron [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Successfully created port: 2c2ab976-7609-4012-a826-68288c4f7f64 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1847.968142] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 1689fc63-3c07-4517-bbef-0011d860e9fc/1689fc63-3c07-4517-bbef-0011d860e9fc.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1847.968142] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7efdde41-cd4b-4d0c-bc23-cc298fb7aee8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.983819] env[63371]: DEBUG nova.compute.manager [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1848.003622] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774906, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.003622] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1848.003622] env[63371]: value = "task-1774908" [ 1848.003622] env[63371]: _type = "Task" [ 1848.003622] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.016990] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774908, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.189631] env[63371]: DEBUG nova.network.neutron [req-e0452926-9da4-4c8b-98d8-412311e6a140 req-41658861-d342-4b63-9d7b-c070bf32e0ff service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updated VIF entry in instance network info cache for port 5a88ea10-929b-41c9-b1b4-bf61377715c6. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1848.191293] env[63371]: DEBUG nova.network.neutron [req-e0452926-9da4-4c8b-98d8-412311e6a140 req-41658861-d342-4b63-9d7b-c070bf32e0ff service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance_info_cache with network_info: [{"id": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "address": "fa:16:3e:b6:f8:d2", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a88ea10-92", "ovs_interfaceid": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1848.258528] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5721f27-b4b6-4122-9530-30b11c1a3fad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.266011] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e8ae4cb-ecd6-478b-955d-afba080e9e48 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.310931] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358921ee-8ad8-412d-856a-f4b40121edff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.318721] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35c11b8-7ac5-41c1-8601-52197e3ae8be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.331874] env[63371]: DEBUG nova.compute.provider_tree [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 
1848.478937] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774906, 'name': CloneVM_Task, 'duration_secs': 1.674876} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.479232] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Created linked-clone VM from snapshot [ 1848.479969] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d600a8e-82a1-455e-8b12-ecf87a2a781d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.487806] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Uploading image 56874c7d-cca0-4fbc-843b-c752b739c3d5 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1848.501483] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1848.501827] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d023e785-c81c-4a06-a38e-c836a73c6469 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.512818] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774908, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.514186] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1848.514186] env[63371]: value = "task-1774909" [ 1848.514186] env[63371]: _type = "Task" [ 1848.514186] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.522201] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774909, 'name': Destroy_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.568469] env[63371]: DEBUG nova.objects.instance [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lazy-loading 'flavor' on Instance uuid 9885de9e-c640-4d82-a47a-980988d89deb {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1848.692837] env[63371]: DEBUG oslo_concurrency.lockutils [req-e0452926-9da4-4c8b-98d8-412311e6a140 req-41658861-d342-4b63-9d7b-c070bf32e0ff service nova] Releasing lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1848.835390] env[63371]: DEBUG nova.scheduler.client.report [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1848.999663] env[63371]: DEBUG nova.compute.manager [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1849.015557] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774908, 'name': ReconfigVM_Task, 'duration_secs': 0.620496} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.019158] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 1689fc63-3c07-4517-bbef-0011d860e9fc/1689fc63-3c07-4517-bbef-0011d860e9fc.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1849.019348] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance '1689fc63-3c07-4517-bbef-0011d860e9fc' progress to 50 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1849.029024] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774909, 'name': Destroy_Task, 'duration_secs': 0.409232} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.029274] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Destroyed the VM [ 1849.029783] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1849.029994] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-41aa40c2-70d4-4175-b66e-e857b2523da8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.043108] env[63371]: DEBUG nova.virt.hardware [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1849.043108] env[63371]: DEBUG nova.virt.hardware 
[None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1849.043108] env[63371]: DEBUG nova.virt.hardware [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1849.043108] env[63371]: DEBUG nova.virt.hardware [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1849.043108] env[63371]: DEBUG nova.virt.hardware [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1849.043108] env[63371]: DEBUG nova.virt.hardware [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1849.043108] env[63371]: DEBUG nova.virt.hardware [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1849.043108] env[63371]: DEBUG nova.virt.hardware [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1849.043108] env[63371]: DEBUG nova.virt.hardware [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1849.043108] env[63371]: DEBUG nova.virt.hardware [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1849.043108] env[63371]: DEBUG nova.virt.hardware [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1849.044413] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08278583-edd5-45ce-927a-579df5d873da {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.049590] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1849.049590] env[63371]: value = "task-1774910" [ 1849.049590] env[63371]: _type = "Task" [ 1849.049590] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.056233] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40b2cfb1-89f3-4a69-a681-e7ac86b96b32 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.063519] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774910, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.074946] env[63371]: DEBUG oslo_concurrency.lockutils [None req-eb4d5953-be15-410e-97ca-19df6800dc8b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "9885de9e-c640-4d82-a47a-980988d89deb" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.381s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.342245] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.342779] env[63371]: DEBUG nova.compute.manager [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1849.346470] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 6.419s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.407123] env[63371]: INFO nova.compute.manager [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Rebuilding instance [ 1849.448763] env[63371]: DEBUG nova.compute.manager [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1849.449652] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0934e9a3-f398-46b7-9327-1413704dbd54 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.466181] env[63371]: DEBUG nova.compute.manager [req-4f757b53-098a-48d9-95b7-2a9ea38ae83e req-5b9b9d5f-3d04-4c8d-ae17-f58a7d86ebf1 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Received event network-vif-plugged-2c2ab976-7609-4012-a826-68288c4f7f64 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1849.466181] env[63371]: DEBUG oslo_concurrency.lockutils [req-4f757b53-098a-48d9-95b7-2a9ea38ae83e req-5b9b9d5f-3d04-4c8d-ae17-f58a7d86ebf1 service nova] Acquiring lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.466181] env[63371]: DEBUG oslo_concurrency.lockutils [req-4f757b53-098a-48d9-95b7-2a9ea38ae83e req-5b9b9d5f-3d04-4c8d-ae17-f58a7d86ebf1 service nova] Lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.466181] env[63371]: DEBUG oslo_concurrency.lockutils [req-4f757b53-098a-48d9-95b7-2a9ea38ae83e req-5b9b9d5f-3d04-4c8d-ae17-f58a7d86ebf1 service nova] Lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.466181] env[63371]: DEBUG nova.compute.manager [req-4f757b53-098a-48d9-95b7-2a9ea38ae83e req-5b9b9d5f-3d04-4c8d-ae17-f58a7d86ebf1 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] No waiting events found dispatching network-vif-plugged-2c2ab976-7609-4012-a826-68288c4f7f64 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1849.466181] env[63371]: WARNING nova.compute.manager [req-4f757b53-098a-48d9-95b7-2a9ea38ae83e req-5b9b9d5f-3d04-4c8d-ae17-f58a7d86ebf1 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Received unexpected event network-vif-plugged-2c2ab976-7609-4012-a826-68288c4f7f64 
for instance with vm_state building and task_state spawning. [ 1849.527616] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ee09e9-292f-40a3-a5d0-5dcf9c73bb2b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.547180] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc47fdf-b128-4c51-8a73-238d2491843f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.566256] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance '1689fc63-3c07-4517-bbef-0011d860e9fc' progress to 67 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1849.571345] env[63371]: DEBUG nova.network.neutron [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Successfully updated port: 2c2ab976-7609-4012-a826-68288c4f7f64 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1849.576719] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774910, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.850446] env[63371]: DEBUG nova.compute.utils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1849.859442] env[63371]: DEBUG nova.compute.manager [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1849.859581] env[63371]: DEBUG nova.network.neutron [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1849.912841] env[63371]: DEBUG nova.policy [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c6416719728485f8dd45eea9e39fdc5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '58f967d3770541269fb89f48b3df58c9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1849.961391] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1849.961761] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43dc7cd2-4c71-48e2-9606-1233284aecba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.972332] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1849.972332] env[63371]: value = "task-1774911" [ 1849.972332] env[63371]: _type = "Task" [ 1849.972332] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.980827] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774911, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.060012] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774910, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.075876] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.076021] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.076168] env[63371]: DEBUG nova.network.neutron [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1850.141193] env[63371]: DEBUG nova.network.neutron [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Port fdb2262d-54b0-4555-939f-39915c982e09 binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1850.234686] env[63371]: DEBUG nova.network.neutron [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Successfully created port: 0f111dbf-ca3c-4a52-8de4-ece209ccf945 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1850.363634] env[63371]: DEBUG nova.compute.manager [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1850.366594] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Applying migration context for instance 1689fc63-3c07-4517-bbef-0011d860e9fc as it has an incoming, in-progress migration 67d1974a-6979-4f3e-959c-350c3b20701a. Migration status is migrating {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1850.368383] env[63371]: INFO nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating resource usage from migration 67d1974a-6979-4f3e-959c-350c3b20701a [ 1850.389012] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.389175] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 6df9af10-0053-4696-920a-10ab2af67ef5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.389295] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 0c9156ea-81c4-4286-a20b-66068a5bce59 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.389412] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 0518c5a8-8cc1-4829-a0cf-5f5904f6df86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.389731] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 1ec21edd-7b7c-4a2b-983f-8aa6c022e033 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.389731] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 9885de9e-c640-4d82-a47a-980988d89deb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.389731] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.389876] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance da2e3b05-9cb0-49bb-8945-924e48cf3431 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.389961] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.390061] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance a27ab7ba-481c-4292-a885-5dc8d8653d0b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.390186] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance ff724a9f-5e9a-4683-8eb3-058fb3639ea5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.390288] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 05f6f94a-c9c4-4737-8b07-77e9c2093497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.390394] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Migration 67d1974a-6979-4f3e-959c-350c3b20701a is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1850.390546] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 1689fc63-3c07-4517-bbef-0011d860e9fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.390631] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance e0fa0976-9a73-4b8b-b011-2e15199be5ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.390742] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance a083adca-0638-4a39-bd4c-30c64d1c9b0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1850.483053] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774911, 'name': PowerOffVM_Task, 'duration_secs': 0.223969} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.483053] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1850.534885] env[63371]: INFO nova.compute.manager [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Detaching volume 74095c08-847f-4b4a-b107-0d7acbea84a7 [ 1850.559581] env[63371]: DEBUG oslo_vmware.api [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774910, 'name': RemoveSnapshot_Task, 'duration_secs': 1.478055} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.559850] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1850.567488] env[63371]: INFO nova.virt.block_device [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Attempting to driver detach volume 74095c08-847f-4b4a-b107-0d7acbea84a7 from mountpoint /dev/sdb [ 1850.567713] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Volume detach. 
Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1850.567891] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368492', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'name': 'volume-74095c08-847f-4b4a-b107-0d7acbea84a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9885de9e-c640-4d82-a47a-980988d89deb', 'attached_at': '', 'detached_at': '', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'serial': '74095c08-847f-4b4a-b107-0d7acbea84a7'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1850.568699] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a28c836-4b09-4cdd-9c76-e8b37a731a89 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.596770] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65acf07d-30e2-4270-96a4-20e0fae5bd76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.604995] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e25b13a-5643-42ad-9da8-2247f13d90cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.627364] env[63371]: DEBUG nova.network.neutron [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1850.629630] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f92c14-360b-4414-83ed-4749435dd06c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.646286] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] The volume has not been displaced from its original location: [datastore1] volume-74095c08-847f-4b4a-b107-0d7acbea84a7/volume-74095c08-847f-4b4a-b107-0d7acbea84a7.vmdk. No consolidation needed. 
{{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1850.651636] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1850.657380] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-08cfd3a7-19db-446f-84b3-991a9aaba2fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.676125] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1850.676125] env[63371]: value = "task-1774912" [ 1850.676125] env[63371]: _type = "Task" [ 1850.676125] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.684516] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774912, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.842341] env[63371]: DEBUG nova.network.neutron [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updating instance_info_cache with network_info: [{"id": "2c2ab976-7609-4012-a826-68288c4f7f64", "address": "fa:16:3e:c1:5a:c9", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c2ab976-76", "ovs_interfaceid": "2c2ab976-7609-4012-a826-68288c4f7f64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.894598] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance cfa04c51-c077-4f16-ae57-e54d62aac044 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1850.894598] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1850.894598] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1851.064117] env[63371]: WARNING nova.compute.manager [None req-fca26550-7b92-43fb-bc4a-c27f188afdb8 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Image not found during snapshot: nova.exception.ImageNotFound: Image 56874c7d-cca0-4fbc-843b-c752b739c3d5 could not be found. [ 1851.097280] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e015a562-09fc-4416-9da9-9c8f4a9ac786 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.105142] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9028a045-9ece-40f0-822b-b40016a9d0a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.135956] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34366780-2dba-4766-9ff5-7bd0b114b461 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.143535] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a3cb1d-dd80-4827-94c7-aa200c529c65 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.157062] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1851.187382] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "1689fc63-3c07-4517-bbef-0011d860e9fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.187677] env[63371]: DEBUG oslo_concurrency.lockutils [None 
req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.187877] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.195227] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774912, 'name': ReconfigVM_Task, 'duration_secs': 0.210125} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.195465] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1851.200726] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-636e5cfd-a706-42b7-87dd-bff0dd200ff4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.216115] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1851.216115] env[63371]: value = "task-1774913" [ 1851.216115] env[63371]: _type = "Task" [ 1851.216115] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.224184] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774913, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.345417] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.346233] env[63371]: DEBUG nova.compute.manager [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Instance network_info: |[{"id": "2c2ab976-7609-4012-a826-68288c4f7f64", "address": "fa:16:3e:c1:5a:c9", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c2ab976-76", "ovs_interfaceid": "2c2ab976-7609-4012-a826-68288c4f7f64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1851.346654] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c1:5a:c9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ba3bd22-c936-470e-89bd-b3a5587e87a0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c2ab976-7609-4012-a826-68288c4f7f64', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1851.354695] env[63371]: DEBUG oslo.service.loopingcall [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1851.354905] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1851.355143] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2373ede5-607d-4d3f-ba04-4b723fd55053 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.374185] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1851.374185] env[63371]: value = "task-1774914" [ 1851.374185] env[63371]: _type = "Task" [ 1851.374185] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.378167] env[63371]: DEBUG nova.compute.manager [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1851.386028] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774914, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.406668] env[63371]: DEBUG nova.virt.hardware [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1851.406921] env[63371]: DEBUG nova.virt.hardware [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1851.407087] env[63371]: DEBUG nova.virt.hardware [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1851.407273] env[63371]: DEBUG nova.virt.hardware [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1851.407419] env[63371]: DEBUG nova.virt.hardware [None req-726765a7-6841-43e4-8062-c6b6270307e0 
tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1851.407562] env[63371]: DEBUG nova.virt.hardware [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1851.407767] env[63371]: DEBUG nova.virt.hardware [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1851.407922] env[63371]: DEBUG nova.virt.hardware [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1851.408103] env[63371]: DEBUG nova.virt.hardware [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1851.408270] env[63371]: DEBUG nova.virt.hardware [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1851.408443] env[63371]: DEBUG nova.virt.hardware [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1851.409312] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04886ec-9a8d-4f7b-b995-64172e72259b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.421996] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1265b2-4c1e-4b9b-801b-fe08f6d70db2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.500110] env[63371]: DEBUG nova.compute.manager [req-ffdaf550-c7c8-447b-8061-554c50e55f1a req-7a7079ed-9387-41b4-aabf-d2c66e606c46 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Received event network-changed-2c2ab976-7609-4012-a826-68288c4f7f64 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1851.500316] env[63371]: DEBUG nova.compute.manager [req-ffdaf550-c7c8-447b-8061-554c50e55f1a req-7a7079ed-9387-41b4-aabf-d2c66e606c46 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Refreshing instance network info cache due to event network-changed-2c2ab976-7609-4012-a826-68288c4f7f64. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1851.500551] env[63371]: DEBUG oslo_concurrency.lockutils [req-ffdaf550-c7c8-447b-8061-554c50e55f1a req-7a7079ed-9387-41b4-aabf-d2c66e606c46 service nova] Acquiring lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.500661] env[63371]: DEBUG oslo_concurrency.lockutils [req-ffdaf550-c7c8-447b-8061-554c50e55f1a req-7a7079ed-9387-41b4-aabf-d2c66e606c46 service nova] Acquired lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1851.500819] env[63371]: DEBUG nova.network.neutron [req-ffdaf550-c7c8-447b-8061-554c50e55f1a req-7a7079ed-9387-41b4-aabf-d2c66e606c46 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Refreshing network info cache for port 2c2ab976-7609-4012-a826-68288c4f7f64 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1851.689124] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 163 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1851.689506] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 163 to 164 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1851.689777] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1851.728711] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774913, 'name': ReconfigVM_Task, 'duration_secs': 0.309952} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.729016] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368492', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'name': 'volume-74095c08-847f-4b4a-b107-0d7acbea84a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9885de9e-c640-4d82-a47a-980988d89deb', 'attached_at': '', 'detached_at': '', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'serial': '74095c08-847f-4b4a-b107-0d7acbea84a7'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1851.757158] env[63371]: DEBUG nova.network.neutron [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Successfully updated port: 0f111dbf-ca3c-4a52-8de4-ece209ccf945 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1851.884260] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774914, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.931477] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.931707] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.931917] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.932119] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.932289] env[63371]: DEBUG oslo_concurrency.lockutils [None 
req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.934642] env[63371]: INFO nova.compute.manager [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Terminating instance [ 1851.936404] env[63371]: DEBUG nova.compute.manager [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1851.936599] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1851.937491] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4a2c13-fba6-4ff5-bd66-249f97a8e9ff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.945056] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1851.945302] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43a21bbc-84c2-46a4-8b01-c4796a0e1896 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.950836] env[63371]: DEBUG oslo_vmware.api [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1851.950836] env[63371]: value = "task-1774915" [ 1851.950836] env[63371]: _type = "Task" [ 1851.950836] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.959269] env[63371]: DEBUG oslo_vmware.api [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774915, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.198602] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1852.198888] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.853s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.201419] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.530s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1852.202932] env[63371]: INFO nova.compute.claims [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1852.250431] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.250713] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.250988] env[63371]: DEBUG nova.network.neutron [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1852.259707] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "refresh_cache-a083adca-0638-4a39-bd4c-30c64d1c9b0e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.259848] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "refresh_cache-a083adca-0638-4a39-bd4c-30c64d1c9b0e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.260027] env[63371]: DEBUG nova.network.neutron [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 
tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1852.270065] env[63371]: DEBUG nova.network.neutron [req-ffdaf550-c7c8-447b-8061-554c50e55f1a req-7a7079ed-9387-41b4-aabf-d2c66e606c46 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updated VIF entry in instance network info cache for port 2c2ab976-7609-4012-a826-68288c4f7f64. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1852.270401] env[63371]: DEBUG nova.network.neutron [req-ffdaf550-c7c8-447b-8061-554c50e55f1a req-7a7079ed-9387-41b4-aabf-d2c66e606c46 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updating instance_info_cache with network_info: [{"id": "2c2ab976-7609-4012-a826-68288c4f7f64", "address": "fa:16:3e:c1:5a:c9", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c2ab976-76", "ovs_interfaceid": "2c2ab976-7609-4012-a826-68288c4f7f64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1852.385206] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774914, 'name': CreateVM_Task, 'duration_secs': 0.667544} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.385377] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1852.386101] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.386294] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.386640] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1852.386903] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b5b1240-8e54-4c27-9a30-12746e220892 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.391396] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1852.391396] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]528e274f-fc16-24ec-defd-764e3c0888d9" [ 1852.391396] env[63371]: _type = "Task" [ 1852.391396] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.399209] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528e274f-fc16-24ec-defd-764e3c0888d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.464311] env[63371]: DEBUG oslo_vmware.api [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774915, 'name': PowerOffVM_Task, 'duration_secs': 0.188223} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.464658] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1852.464926] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1852.465305] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e5280b5-ceb8-47d4-89c7-311e1270319c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.587077] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1852.587413] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1852.587691] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Deleting the datastore file [datastore1] a27ab7ba-481c-4292-a885-5dc8d8653d0b {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1852.588077] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e5730ae-0953-4cec-8285-d7e99cfddbbc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.596579] env[63371]: DEBUG oslo_vmware.api [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1852.596579] env[63371]: value = "task-1774917" [ 1852.596579] env[63371]: _type = "Task" [ 1852.596579] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.605765] env[63371]: DEBUG oslo_vmware.api [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774917, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.772924] env[63371]: DEBUG oslo_concurrency.lockutils [req-ffdaf550-c7c8-447b-8061-554c50e55f1a req-7a7079ed-9387-41b4-aabf-d2c66e606c46 service nova] Releasing lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.792356] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1852.792658] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98df5152-bd08-4da1-b995-d69007e0e0fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.800517] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1852.800517] env[63371]: value = "task-1774918" [ 1852.800517] env[63371]: _type = "Task" [ 1852.800517] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.807353] env[63371]: DEBUG nova.network.neutron [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1852.819089] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1852.819315] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Volume detach. 
Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1852.819556] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368492', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'name': 'volume-74095c08-847f-4b4a-b107-0d7acbea84a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9885de9e-c640-4d82-a47a-980988d89deb', 'attached_at': '', 'detached_at': '', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'serial': '74095c08-847f-4b4a-b107-0d7acbea84a7'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1852.820414] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a41b401-4c9f-4c07-ba91-339deff11210 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.842856] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5233bc6b-88cc-44d7-a4b7-317051865b71 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.849663] env[63371]: WARNING nova.virt.vmwareapi.driver [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1852.849945] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1852.850708] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f3d554-34dc-40cf-88a1-d7d307843e0f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.857178] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1852.857423] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7b6ce3aa-28da-44ee-98fe-770a71960ada {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.901382] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]528e274f-fc16-24ec-defd-764e3c0888d9, 'name': SearchDatastore_Task, 'duration_secs': 0.010183} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.901697] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.901934] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1852.902183] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.902519] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.902519] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1852.902753] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4e64243b-6657-4f9a-a2b0-8419aa95c23e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.910707] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1852.910891] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1852.911624] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da126d04-810b-4e80-8dbc-46e38a2eb834 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.916906] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1852.916906] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]529caea5-8167-4fb7-7791-c1856c4226da" [ 1852.916906] env[63371]: _type = "Task" [ 1852.916906] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.924650] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529caea5-8167-4fb7-7791-c1856c4226da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.968084] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1852.968346] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1852.968529] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleting the datastore file [datastore1] 9885de9e-c640-4d82-a47a-980988d89deb {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1852.968774] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-116adb40-f4b9-40ac-8309-90ca4c8fadb9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.977934] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1852.977934] env[63371]: value = "task-1774920" [ 1852.977934] env[63371]: _type = "Task" [ 1852.977934] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.994462] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774920, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.087713] env[63371]: DEBUG nova.network.neutron [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Updating instance_info_cache with network_info: [{"id": "0f111dbf-ca3c-4a52-8de4-ece209ccf945", "address": "fa:16:3e:b0:31:e5", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f111dbf-ca", "ovs_interfaceid": "0f111dbf-ca3c-4a52-8de4-ece209ccf945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.106334] env[63371]: DEBUG oslo_vmware.api [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774917, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220579} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.106604] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1853.106785] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1853.106951] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1853.107133] env[63371]: INFO nova.compute.manager [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Took 1.17 seconds to destroy the instance on the hypervisor. 
[ 1853.107370] env[63371]: DEBUG oslo.service.loopingcall [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1853.107560] env[63371]: DEBUG nova.compute.manager [-] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1853.107733] env[63371]: DEBUG nova.network.neutron [-] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1853.115773] env[63371]: DEBUG nova.network.neutron [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance_info_cache with network_info: [{"id": "fdb2262d-54b0-4555-939f-39915c982e09", "address": "fa:16:3e:0d:42:9d", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdb2262d-54", "ovs_interfaceid": "fdb2262d-54b0-4555-939f-39915c982e09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.427008] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529caea5-8167-4fb7-7791-c1856c4226da, 'name': SearchDatastore_Task, 'duration_secs': 0.016506} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.430031] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c6cc3c93-a0d9-48e0-8985-7e02f3d0f672 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.435861] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1853.435861] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52e82d7d-92bd-2976-69a5-8c642439b79e" [ 1853.435861] env[63371]: _type = "Task" [ 1853.435861] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.445371] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e82d7d-92bd-2976-69a5-8c642439b79e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.459370] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a170560-52a4-40a6-b20d-27092300f926 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.466152] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5806d970-84eb-4380-8759-c343dc32bbd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.499343] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4cfe26e-f4da-443f-869f-0255fd26feb4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.506993] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774920, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.248973} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.509119] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1853.509305] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1853.509475] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1853.513425] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2afc9ca-3dda-4555-88b7-3fce30f7bd3a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.527632] env[63371]: DEBUG nova.compute.provider_tree [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1853.531042] env[63371]: DEBUG nova.compute.manager [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Received event network-vif-plugged-0f111dbf-ca3c-4a52-8de4-ece209ccf945 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1853.531289] env[63371]: DEBUG oslo_concurrency.lockutils [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] Acquiring lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.531504] env[63371]: DEBUG oslo_concurrency.lockutils [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] Lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.531713] env[63371]: DEBUG oslo_concurrency.lockutils [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] Lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.532463] env[63371]: DEBUG nova.compute.manager [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] 
[instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] No waiting events found dispatching network-vif-plugged-0f111dbf-ca3c-4a52-8de4-ece209ccf945 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1853.532463] env[63371]: WARNING nova.compute.manager [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Received unexpected event network-vif-plugged-0f111dbf-ca3c-4a52-8de4-ece209ccf945 for instance with vm_state building and task_state spawning. [ 1853.532463] env[63371]: DEBUG nova.compute.manager [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Received event network-changed-0f111dbf-ca3c-4a52-8de4-ece209ccf945 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1853.532463] env[63371]: DEBUG nova.compute.manager [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Refreshing instance network info cache due to event network-changed-0f111dbf-ca3c-4a52-8de4-ece209ccf945. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1853.532463] env[63371]: DEBUG oslo_concurrency.lockutils [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] Acquiring lock "refresh_cache-a083adca-0638-4a39-bd4c-30c64d1c9b0e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.590037] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "refresh_cache-a083adca-0638-4a39-bd4c-30c64d1c9b0e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.590397] env[63371]: DEBUG nova.compute.manager [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Instance network_info: |[{"id": "0f111dbf-ca3c-4a52-8de4-ece209ccf945", "address": "fa:16:3e:b0:31:e5", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f111dbf-ca", "ovs_interfaceid": "0f111dbf-ca3c-4a52-8de4-ece209ccf945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1853.590745] env[63371]: 
DEBUG oslo_concurrency.lockutils [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] Acquired lock "refresh_cache-a083adca-0638-4a39-bd4c-30c64d1c9b0e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.590930] env[63371]: DEBUG nova.network.neutron [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Refreshing network info cache for port 0f111dbf-ca3c-4a52-8de4-ece209ccf945 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1853.592148] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:31:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0f111dbf-ca3c-4a52-8de4-ece209ccf945', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1853.599565] env[63371]: DEBUG oslo.service.loopingcall [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1853.602429] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1853.603148] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fd326fb9-f6ec-4340-81b8-f9b0b3e59a06 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.617791] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.627500] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1853.627500] env[63371]: value = "task-1774921" [ 1853.627500] env[63371]: _type = "Task" [ 1853.627500] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.636031] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774921, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.827851] env[63371]: DEBUG nova.network.neutron [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Updated VIF entry in instance network info cache for port 0f111dbf-ca3c-4a52-8de4-ece209ccf945. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1853.828291] env[63371]: DEBUG nova.network.neutron [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Updating instance_info_cache with network_info: [{"id": "0f111dbf-ca3c-4a52-8de4-ece209ccf945", "address": "fa:16:3e:b0:31:e5", "network": {"id": "6a3972ee-bec5-4b50-a804-934447123274", "bridge": "br-int", "label": "tempest-ServersTestJSON-145394263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "58f967d3770541269fb89f48b3df58c9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0f111dbf-ca", "ovs_interfaceid": "0f111dbf-ca3c-4a52-8de4-ece209ccf945", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.903768] env[63371]: DEBUG nova.network.neutron [-] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1853.957585] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e82d7d-92bd-2976-69a5-8c642439b79e, 'name': SearchDatastore_Task, 'duration_secs': 0.010337} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.957899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.958204] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e0fa0976-9a73-4b8b-b011-2e15199be5ff/e0fa0976-9a73-4b8b-b011-2e15199be5ff.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1853.958515] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bc30ddbd-1662-4cd2-ab8d-9c073b279942 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.966928] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1853.966928] env[63371]: value = "task-1774922" [ 1853.966928] env[63371]: _type = "Task" [ 1853.966928] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.978841] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774922, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.019246] env[63371]: INFO nova.virt.block_device [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Booting with volume 74095c08-847f-4b4a-b107-0d7acbea84a7 at /dev/sdb [ 1854.033929] env[63371]: DEBUG nova.scheduler.client.report [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1854.054931] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-76d21d6a-6614-4521-a8ee-55c9f881ac9c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.065638] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8cd0fa8-c1e3-4f1b-af10-4665ec6c784b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.104591] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8f1531d-03dd-4cfd-b2ba-526a48549a9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.113510] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75207bc-d563-4dbb-a3f8-1d400593aa2d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.138696] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774921, 'name': CreateVM_Task, 'duration_secs': 0.414055} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.138918] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1854.139551] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.139942] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.140078] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1854.151499] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1750094e-90bc-4ea5-93b2-ab400c3aeb1e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.155223] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2a1162c-4c84-498a-a005-39464f4677fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.158223] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e68417-c6b7-409c-9154-23a4fb9b55e7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.163867] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1854.163867] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524d716a-7244-31fd-a19a-db66163b8264" [ 1854.163867] env[63371]: _type = "Task" [ 1854.163867] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.190949] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1d554a-ed38-4161-9e01-3842c4948e64 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.196382] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390525ba-6f62-46ac-b64e-ab71e69a5dae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.209366] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance '1689fc63-3c07-4517-bbef-0011d860e9fc' progress to 83 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1854.213096] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524d716a-7244-31fd-a19a-db66163b8264, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.217700] env[63371]: DEBUG nova.virt.block_device [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Updating existing volume attachment record: fcddcd9c-5113-4de4-8872-4491f89eb919 {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1854.330842] env[63371]: DEBUG oslo_concurrency.lockutils [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] Releasing lock "refresh_cache-a083adca-0638-4a39-bd4c-30c64d1c9b0e" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.331149] env[63371]: DEBUG nova.compute.manager [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Received event network-vif-deleted-9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1854.331311] env[63371]: INFO nova.compute.manager [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Neutron deleted interface 9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2; detaching it from the instance and deleting it from the info cache [ 1854.331530] env[63371]: DEBUG nova.network.neutron [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.406775] env[63371]: INFO nova.compute.manager [-] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Took 1.30 seconds to deallocate network for instance. 
[ 1854.476928] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774922, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.538594] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.337s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.539187] env[63371]: DEBUG nova.compute.manager [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1854.694676] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524d716a-7244-31fd-a19a-db66163b8264, 'name': SearchDatastore_Task, 'duration_secs': 0.083102} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.694986] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.695239] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1854.695467] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.695624] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.695797] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1854.696072] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca0b894a-ec59-435c-b6c6-f63564f81ff3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.704481] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1854.704656] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1854.705364] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da5c087c-5a34-4807-8754-1582beb620af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.709975] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1854.709975] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]529a51d9-7a57-6c30-33ca-7ad3ab8f6af4" [ 1854.709975] env[63371]: _type = "Task" [ 1854.709975] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.717227] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529a51d9-7a57-6c30-33ca-7ad3ab8f6af4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.722917] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1854.723160] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f0218d9-bdb9-47ec-8f29-5757dc6654d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.728394] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1854.728394] env[63371]: value = "task-1774923" [ 1854.728394] env[63371]: _type = "Task" [ 1854.728394] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.735415] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774923, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.834487] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-be040ab1-a2b7-4b61-a582-a64a65f5fa01 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.844066] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be324033-2a35-4497-87db-3462579277e4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.877230] env[63371]: DEBUG nova.compute.manager [req-32c008b5-1a69-48e9-ab64-da95d3e481db req-4e4398c7-c7aa-4aa3-a9c2-5d8d9961d456 service nova] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Detach interface failed, port_id=9df7ecd9-de56-43c1-aa63-6dbb16a3a5f2, reason: Instance a27ab7ba-481c-4292-a885-5dc8d8653d0b could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1854.914584] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.914843] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.915070] env[63371]: DEBUG nova.objects.instance [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lazy-loading 'resources' on Instance uuid a27ab7ba-481c-4292-a885-5dc8d8653d0b {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1854.923741] env[63371]: INFO nova.compute.manager [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Rebuilding instance [ 1854.974014] env[63371]: DEBUG nova.compute.manager [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1854.974853] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eb26068-34f0-4c48-96c5-1a34427715c2 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.980857] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774922, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525897} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.981466] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] e0fa0976-9a73-4b8b-b011-2e15199be5ff/e0fa0976-9a73-4b8b-b011-2e15199be5ff.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1854.981728] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1854.981991] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2be0ec98-94c2-4187-9df1-84a2458d812a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.992237] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1854.992237] env[63371]: value = "task-1774924" [ 1854.992237] env[63371]: _type = "Task" [ 1854.992237] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.001119] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774924, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.044529] env[63371]: DEBUG nova.compute.utils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1855.045924] env[63371]: DEBUG nova.compute.manager [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1855.046178] env[63371]: DEBUG nova.network.neutron [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1855.094990] env[63371]: DEBUG nova.policy [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4302f381e0948438b9ee23a33a0f982', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35882164a8734563a006675f2ec6ba71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1855.221410] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529a51d9-7a57-6c30-33ca-7ad3ab8f6af4, 'name': SearchDatastore_Task, 'duration_secs': 0.008221} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.222340] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56e0dd65-560b-42f7-8d35-0a648592abc2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.228140] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1855.228140] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52a045a4-45b9-5a59-4b9e-f6da7c95ce69" [ 1855.228140] env[63371]: _type = "Task" [ 1855.228140] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.239391] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a045a4-45b9-5a59-4b9e-f6da7c95ce69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.242846] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774923, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.434776] env[63371]: DEBUG nova.network.neutron [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Successfully created port: 3856f78a-36e6-49ce-8a81-1e94a9c8f1cc {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1855.489660] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1855.489995] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba9e516d-33d5-4bdf-a4d7-f266f4a4dddc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.499886] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1855.499886] env[63371]: value = "task-1774925" [ 1855.499886] env[63371]: _type = "Task" [ 1855.499886] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.506211] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774924, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060887} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.506772] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1855.507526] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84a241a-40d4-46d6-b0a6-9dc7b2c61d29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.512913] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774925, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.536052] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] e0fa0976-9a73-4b8b-b011-2e15199be5ff/e0fa0976-9a73-4b8b-b011-2e15199be5ff.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1855.538674] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01e47314-fae3-4536-81da-df899dcd1f72 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.554908] env[63371]: DEBUG nova.compute.manager [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1855.563490] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1855.563490] env[63371]: value = "task-1774926" [ 1855.563490] env[63371]: _type = "Task" [ 1855.563490] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.575286] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774926, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.699602] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c41b30d0-814a-4877-911a-2dd670662ec2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.707595] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e577dbc-b1b1-410f-8772-8c146f64ec6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.743913] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9896fb6-d9d4-4033-947a-70bcac77691b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.751709] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52a045a4-45b9-5a59-4b9e-f6da7c95ce69, 'name': SearchDatastore_Task, 'duration_secs': 0.011423} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.756368] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.756665] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] a083adca-0638-4a39-bd4c-30c64d1c9b0e/a083adca-0638-4a39-bd4c-30c64d1c9b0e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1855.756994] env[63371]: DEBUG oslo_vmware.api [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774923, 'name': PowerOnVM_Task, 'duration_secs': 0.542046} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.757203] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e1d09c7-b059-4be0-ad4a-e2add88998f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.760165] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6eee84-d614-41b2-879d-292a84eb5f62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.763734] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1855.763929] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3f55c28f-f2cd-4257-b54d-0d15443b29e7 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance '1689fc63-3c07-4517-bbef-0011d860e9fc' progress to 100 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1855.778586] env[63371]: DEBUG nova.compute.provider_tree [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1855.780980] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1855.780980] env[63371]: value = "task-1774927" [ 1855.780980] env[63371]: _type = "Task" [ 1855.780980] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.788644] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774927, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.009921] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774925, 'name': PowerOffVM_Task, 'duration_secs': 0.455131} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.010584] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1856.010584] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1856.011563] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4250bfa1-fdab-4066-bba8-aed11352bb39 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.018823] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1856.019125] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4462a3c1-4272-463f-a8b9-f9eab077a125 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.074529] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774926, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.159834] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1856.160082] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1856.160269] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleting the datastore file [datastore1] 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1856.160567] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a35c9545-20a2-4e8c-a615-a4b4af9ce603 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.169129] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1856.169129] env[63371]: value = "task-1774929" [ 1856.169129] env[63371]: _type = "Task" [ 1856.169129] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.177199] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774929, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.282607] env[63371]: DEBUG nova.scheduler.client.report [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1856.295671] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774927, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.520784} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.295946] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] a083adca-0638-4a39-bd4c-30c64d1c9b0e/a083adca-0638-4a39-bd4c-30c64d1c9b0e.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1856.296170] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1856.296904] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6f36bbc9-4703-47f7-b3c7-596d95841c88 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.303457] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1856.303457] env[63371]: value = "task-1774930" [ 1856.303457] env[63371]: _type = "Task" [ 1856.303457] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.313210] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774930, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.336091] env[63371]: DEBUG nova.virt.hardware [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1856.336374] env[63371]: DEBUG nova.virt.hardware [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1856.336533] env[63371]: DEBUG nova.virt.hardware [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1856.336711] env[63371]: DEBUG nova.virt.hardware [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1856.336855] env[63371]: DEBUG nova.virt.hardware [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1856.336996] env[63371]: DEBUG nova.virt.hardware [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1856.337214] env[63371]: DEBUG nova.virt.hardware [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1856.337375] env[63371]: DEBUG nova.virt.hardware [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1856.337545] 
env[63371]: DEBUG nova.virt.hardware [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1856.337711] env[63371]: DEBUG nova.virt.hardware [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1856.337878] env[63371]: DEBUG nova.virt.hardware [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1856.338745] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb478b5-4191-430c-9fad-98657e86e28a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.346279] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c58d269f-e17c-435c-9e73-d6a2cd4a9ec6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.359595] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bf:a3:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6fb0104-186b-4288-b87e-634893f46f01', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5b22240-e8c4-447a-bc92-3a83ae9674ec', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1856.367253] env[63371]: DEBUG oslo.service.loopingcall [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1856.367792] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1856.368018] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a382b46-08e0-4a62-b9fa-81b6d24285f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.388583] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1856.388583] env[63371]: value = "task-1774931" [ 1856.388583] env[63371]: _type = "Task" [ 1856.388583] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.396898] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774931, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.571287] env[63371]: DEBUG nova.compute.manager [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1856.579032] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774926, 'name': ReconfigVM_Task, 'duration_secs': 0.535804} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.579312] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Reconfigured VM instance instance-00000071 to attach disk [datastore1] e0fa0976-9a73-4b8b-b011-2e15199be5ff/e0fa0976-9a73-4b8b-b011-2e15199be5ff.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1856.579938] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-17b649a6-5118-4753-b9c4-02020eecb375 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.588618] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1856.588618] env[63371]: value = "task-1774932" [ 1856.588618] env[63371]: _type = "Task" [ 1856.588618] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.594226] env[63371]: DEBUG nova.virt.hardware [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1856.594497] env[63371]: DEBUG nova.virt.hardware [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1856.594655] env[63371]: DEBUG nova.virt.hardware [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1856.594831] env[63371]: DEBUG nova.virt.hardware [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1856.594977] env[63371]: DEBUG nova.virt.hardware [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1856.595138] env[63371]: DEBUG nova.virt.hardware [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1856.595340] env[63371]: DEBUG nova.virt.hardware [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1856.595494] env[63371]: DEBUG nova.virt.hardware [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1856.595679] env[63371]: DEBUG nova.virt.hardware [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1856.595852] env[63371]: DEBUG nova.virt.hardware [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1856.596031] env[63371]: DEBUG nova.virt.hardware [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1856.596822] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba756c6c-9e8a-41c7-a241-1a45d2818f34 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.602757] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774932, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.608143] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49302758-60ee-421a-803f-8c8b368a70f5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.678568] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774929, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24555} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.678846] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1856.679057] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1856.679270] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1856.791153] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.876s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.814211] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774930, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.148594} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.814448] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1856.815346] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b173f71-5d22-48db-a162-57d95a33485e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.818925] env[63371]: INFO nova.scheduler.client.report [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Deleted allocations for instance a27ab7ba-481c-4292-a885-5dc8d8653d0b [ 1856.842038] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] a083adca-0638-4a39-bd4c-30c64d1c9b0e/a083adca-0638-4a39-bd4c-30c64d1c9b0e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1856.842430] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68b3a3c0-58d1-4a06-a766-d6dc02514089 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.861688] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1856.861688] env[63371]: value = "task-1774933" [ 1856.861688] env[63371]: _type = "Task" [ 1856.861688] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.871089] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774933, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.897361] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774931, 'name': CreateVM_Task, 'duration_secs': 0.444286} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.897564] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1856.898241] env[63371]: DEBUG oslo_concurrency.lockutils [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1856.898620] env[63371]: DEBUG oslo_concurrency.lockutils [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1856.898719] env[63371]: DEBUG oslo_concurrency.lockutils [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1856.898978] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56d44635-93f7-4b9e-a427-6e36bdabb54b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.903709] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1856.903709] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525d01ad-4caf-02c5-38c7-dd37c7d8fed0" [ 1856.903709] env[63371]: _type = "Task" [ 1856.903709] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.911202] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525d01ad-4caf-02c5-38c7-dd37c7d8fed0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.098724] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774932, 'name': Rename_Task, 'duration_secs': 0.192887} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.099012] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1857.099276] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2bcfca52-e25a-43b4-87a8-ec7dd9a71d15 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.106144] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1857.106144] env[63371]: value = "task-1774934" [ 1857.106144] env[63371]: _type = "Task" [ 1857.106144] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.116951] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774934, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.238343] env[63371]: DEBUG nova.compute.manager [req-45cf6d69-ae69-48e8-a8c1-1aa6aeaffa27 req-d602e0e5-876d-4929-94b9-e8c1e112082e service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Received event network-vif-plugged-3856f78a-36e6-49ce-8a81-1e94a9c8f1cc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1857.238343] env[63371]: DEBUG oslo_concurrency.lockutils [req-45cf6d69-ae69-48e8-a8c1-1aa6aeaffa27 req-d602e0e5-876d-4929-94b9-e8c1e112082e service nova] Acquiring lock "cfa04c51-c077-4f16-ae57-e54d62aac044-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1857.238343] env[63371]: DEBUG oslo_concurrency.lockutils [req-45cf6d69-ae69-48e8-a8c1-1aa6aeaffa27 req-d602e0e5-876d-4929-94b9-e8c1e112082e service nova] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.238343] env[63371]: DEBUG oslo_concurrency.lockutils [req-45cf6d69-ae69-48e8-a8c1-1aa6aeaffa27 req-d602e0e5-876d-4929-94b9-e8c1e112082e service nova] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.238616] env[63371]: DEBUG nova.compute.manager [req-45cf6d69-ae69-48e8-a8c1-1aa6aeaffa27 req-d602e0e5-876d-4929-94b9-e8c1e112082e service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] No waiting events found dispatching network-vif-plugged-3856f78a-36e6-49ce-8a81-1e94a9c8f1cc {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1857.238616] env[63371]: WARNING 
nova.compute.manager [req-45cf6d69-ae69-48e8-a8c1-1aa6aeaffa27 req-d602e0e5-876d-4929-94b9-e8c1e112082e service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Received unexpected event network-vif-plugged-3856f78a-36e6-49ce-8a81-1e94a9c8f1cc for instance with vm_state building and task_state spawning. [ 1857.327555] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7a5c569b-eeaf-4391-9aac-195a6ee0517b tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "a27ab7ba-481c-4292-a885-5dc8d8653d0b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.396s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.373117] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774933, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.415660] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525d01ad-4caf-02c5-38c7-dd37c7d8fed0, 'name': SearchDatastore_Task, 'duration_secs': 0.009028} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.415980] env[63371]: DEBUG oslo_concurrency.lockutils [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1857.416232] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1857.416478] env[63371]: DEBUG oslo_concurrency.lockutils [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1857.416629] env[63371]: DEBUG oslo_concurrency.lockutils [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1857.416825] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Creating directory 
with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1857.417103] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-206cd83a-5bc2-4b7e-940b-4569ec6e4e9c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.425858] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1857.426056] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1857.427131] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92b7b7ea-efe4-46a1-b89e-123f1f3e7f2c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.432329] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1857.432329] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52088ca9-bd76-ae4a-fdda-12403b5d6453" [ 1857.432329] env[63371]: _type = "Task" [ 1857.432329] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.442290] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52088ca9-bd76-ae4a-fdda-12403b5d6453, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.618395] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774934, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.722021] env[63371]: DEBUG nova.virt.hardware [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1857.722340] env[63371]: DEBUG nova.virt.hardware [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1857.722606] env[63371]: DEBUG nova.virt.hardware [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1857.722891] env[63371]: DEBUG nova.virt.hardware [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1857.723474] env[63371]: DEBUG nova.virt.hardware [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1857.723474] env[63371]: DEBUG nova.virt.hardware [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1857.723683] env[63371]: DEBUG nova.virt.hardware [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1857.723725] env[63371]: DEBUG nova.virt.hardware [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1857.723864] env[63371]: DEBUG nova.virt.hardware [None 
req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1857.724039] env[63371]: DEBUG nova.virt.hardware [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1857.724220] env[63371]: DEBUG nova.virt.hardware [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1857.725434] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad6f8a0-a3a7-4414-891d-56f123c332a9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.733497] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a6ce9f5-1e81-4014-a31c-73af2790b4c0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.748039] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:04:2f:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '45c89cd7-4637-40af-9652-42cad1269c7e', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1857.756271] env[63371]: DEBUG oslo.service.loopingcall [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1857.756271] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1857.756271] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-740ee652-9df1-410b-a6f0-ba412ca21984 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.783810] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1857.783810] env[63371]: value = "task-1774935" [ 1857.783810] env[63371]: _type = "Task" [ 1857.783810] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.794589] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774935, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.839938] env[63371]: DEBUG nova.network.neutron [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Successfully updated port: 3856f78a-36e6-49ce-8a81-1e94a9c8f1cc {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1857.874201] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774933, 'name': ReconfigVM_Task, 'duration_secs': 0.5167} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.875022] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Reconfigured VM instance instance-00000072 to attach disk [datastore1] a083adca-0638-4a39-bd4c-30c64d1c9b0e/a083adca-0638-4a39-bd4c-30c64d1c9b0e.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1857.875198] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f25f1a95-8c79-4a69-9a1c-19c0ff6b1d53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.881534] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1857.881534] env[63371]: value = "task-1774936" [ 1857.881534] env[63371]: _type = "Task" [ 1857.881534] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.892991] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774936, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.943504] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52088ca9-bd76-ae4a-fdda-12403b5d6453, 'name': SearchDatastore_Task, 'duration_secs': 0.009288} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.944472] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c37b4576-5b77-427d-a92b-6f9f41ca31db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.950283] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1857.950283] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52aa79b0-0fbc-2a71-aa30-2f47b8cb0129" [ 1857.950283] env[63371]: _type = "Task" [ 1857.950283] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.958235] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52aa79b0-0fbc-2a71-aa30-2f47b8cb0129, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.120051] env[63371]: DEBUG oslo_vmware.api [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774934, 'name': PowerOnVM_Task, 'duration_secs': 0.695388} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.120396] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1858.120655] env[63371]: INFO nova.compute.manager [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Took 9.12 seconds to spawn the instance on the hypervisor. 
[ 1858.120853] env[63371]: DEBUG nova.compute.manager [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1858.121655] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8629c35-f400-43dd-88cf-79248c8743d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.265720] env[63371]: DEBUG nova.compute.manager [req-96c2f2b7-35e7-4c35-8503-c9cd029b86fd req-4c90647c-8fa7-412a-8d3d-fddac2f1bdfa service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Received event network-changed-3856f78a-36e6-49ce-8a81-1e94a9c8f1cc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1858.265899] env[63371]: DEBUG nova.compute.manager [req-96c2f2b7-35e7-4c35-8503-c9cd029b86fd req-4c90647c-8fa7-412a-8d3d-fddac2f1bdfa service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Refreshing instance network info cache due to event network-changed-3856f78a-36e6-49ce-8a81-1e94a9c8f1cc. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1858.266159] env[63371]: DEBUG oslo_concurrency.lockutils [req-96c2f2b7-35e7-4c35-8503-c9cd029b86fd req-4c90647c-8fa7-412a-8d3d-fddac2f1bdfa service nova] Acquiring lock "refresh_cache-cfa04c51-c077-4f16-ae57-e54d62aac044" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.266312] env[63371]: DEBUG oslo_concurrency.lockutils [req-96c2f2b7-35e7-4c35-8503-c9cd029b86fd req-4c90647c-8fa7-412a-8d3d-fddac2f1bdfa service nova] Acquired lock "refresh_cache-cfa04c51-c077-4f16-ae57-e54d62aac044" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.266473] env[63371]: DEBUG nova.network.neutron [req-96c2f2b7-35e7-4c35-8503-c9cd029b86fd req-4c90647c-8fa7-412a-8d3d-fddac2f1bdfa service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Refreshing network info cache for port 3856f78a-36e6-49ce-8a81-1e94a9c8f1cc {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1858.294264] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774935, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.295916] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "1689fc63-3c07-4517-bbef-0011d860e9fc" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.296065] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.296250] env[63371]: DEBUG nova.compute.manager [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Going to confirm migration 6 {{(pid=63371) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1858.342131] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "refresh_cache-cfa04c51-c077-4f16-ae57-e54d62aac044" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.391738] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774936, 'name': Rename_Task, 'duration_secs': 0.147065} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.392012] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1858.392309] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3dc80bb-c430-49ae-8ab0-912f361c34c8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.399020] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1858.399020] env[63371]: value = "task-1774937" [ 1858.399020] env[63371]: _type = "Task" [ 1858.399020] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.406492] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774937, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.461534] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52aa79b0-0fbc-2a71-aa30-2f47b8cb0129, 'name': SearchDatastore_Task, 'duration_secs': 0.011884} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.461952] env[63371]: DEBUG oslo_concurrency.lockutils [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.462247] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9885de9e-c640-4d82-a47a-980988d89deb/9885de9e-c640-4d82-a47a-980988d89deb.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1858.462587] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42df9a45-3875-428d-be55-a908a96d9af3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.469999] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1858.469999] env[63371]: value = "task-1774938" [ 1858.469999] env[63371]: _type = "Task" [ 1858.469999] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.478032] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774938, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.593009] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "fac8df06-ab04-41ec-a32b-f46a08470a97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.593152] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "fac8df06-ab04-41ec-a32b-f46a08470a97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.638842] env[63371]: INFO nova.compute.manager [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Took 19.82 seconds to build instance. [ 1858.794131] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774935, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.822356] env[63371]: DEBUG nova.network.neutron [req-96c2f2b7-35e7-4c35-8503-c9cd029b86fd req-4c90647c-8fa7-412a-8d3d-fddac2f1bdfa service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1858.844282] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.845125] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquired lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.845125] env[63371]: DEBUG nova.network.neutron [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1858.845246] env[63371]: DEBUG nova.objects.instance [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lazy-loading 'info_cache' on Instance uuid 1689fc63-3c07-4517-bbef-0011d860e9fc {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1858.908364] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774937, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.909159] env[63371]: DEBUG nova.network.neutron [req-96c2f2b7-35e7-4c35-8503-c9cd029b86fd req-4c90647c-8fa7-412a-8d3d-fddac2f1bdfa service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.979617] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774938, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.095586] env[63371]: DEBUG nova.compute.manager [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1859.141902] env[63371]: DEBUG oslo_concurrency.lockutils [None req-acb7e134-65f2-4fba-a554-3fb8d9bd1fa4 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.337s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.295601] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774935, 'name': CreateVM_Task, 'duration_secs': 1.280757} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.295663] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1859.296365] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.296931] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.297187] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1859.297452] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc81f4b8-31e0-4698-8bb7-485994d42569 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.304580] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1859.304580] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520f8541-cd28-8d21-4ec9-2abb8d2c7dcb" [ 1859.304580] env[63371]: _type = "Task" [ 1859.304580] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.313699] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520f8541-cd28-8d21-4ec9-2abb8d2c7dcb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.410510] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774937, 'name': PowerOnVM_Task} progress is 71%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.410983] env[63371]: DEBUG oslo_concurrency.lockutils [req-96c2f2b7-35e7-4c35-8503-c9cd029b86fd req-4c90647c-8fa7-412a-8d3d-fddac2f1bdfa service nova] Releasing lock "refresh_cache-cfa04c51-c077-4f16-ae57-e54d62aac044" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.411349] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "refresh_cache-cfa04c51-c077-4f16-ae57-e54d62aac044" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.411505] env[63371]: DEBUG nova.network.neutron [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1859.480676] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774938, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.562026] env[63371]: DEBUG nova.compute.manager [req-b9d43a80-8321-4f5c-a892-85a85b331109 req-239c75e0-9cc0-4e70-963c-72dd828514be service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Received event network-changed-39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1859.562211] env[63371]: DEBUG nova.compute.manager [req-b9d43a80-8321-4f5c-a892-85a85b331109 req-239c75e0-9cc0-4e70-963c-72dd828514be service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Refreshing instance network info cache due to event network-changed-39fe8c75-7aaa-42da-a231-9c68310ef7c8. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1859.562450] env[63371]: DEBUG oslo_concurrency.lockutils [req-b9d43a80-8321-4f5c-a892-85a85b331109 req-239c75e0-9cc0-4e70-963c-72dd828514be service nova] Acquiring lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.562563] env[63371]: DEBUG oslo_concurrency.lockutils [req-b9d43a80-8321-4f5c-a892-85a85b331109 req-239c75e0-9cc0-4e70-963c-72dd828514be service nova] Acquired lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.562707] env[63371]: DEBUG nova.network.neutron [req-b9d43a80-8321-4f5c-a892-85a85b331109 req-239c75e0-9cc0-4e70-963c-72dd828514be service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Refreshing network info cache for port 39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1859.620178] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.620443] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.621892] env[63371]: INFO nova.compute.claims [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1859.815460] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520f8541-cd28-8d21-4ec9-2abb8d2c7dcb, 'name': SearchDatastore_Task, 'duration_secs': 0.051815} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.815783] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.816039] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1859.816289] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.816439] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.816627] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1859.816907] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90c24dd6-9eb6-4385-a819-d82c688c37da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.825632] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1859.825813] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1859.826575] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b75ec604-eb8b-4f9c-8c3d-93e3e335f4f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.831959] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1859.831959] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f4c43e-4c40-bb88-c9b0-a06e0fcdb161" [ 1859.831959] env[63371]: _type = "Task" [ 1859.831959] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.839751] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f4c43e-4c40-bb88-c9b0-a06e0fcdb161, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.909654] env[63371]: DEBUG oslo_vmware.api [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774937, 'name': PowerOnVM_Task, 'duration_secs': 1.362156} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.909961] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1859.910188] env[63371]: INFO nova.compute.manager [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Took 8.53 seconds to spawn the instance on the hypervisor. [ 1859.910364] env[63371]: DEBUG nova.compute.manager [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1859.911165] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b73bf6b-c114-451f-8617-7ea01c7c1954 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.953247] env[63371]: DEBUG nova.network.neutron [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1859.980706] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774938, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.048983} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.984438] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9885de9e-c640-4d82-a47a-980988d89deb/9885de9e-c640-4d82-a47a-980988d89deb.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1859.984659] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1859.984913] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e7f34149-d371-4402-8162-1e559177b2f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.991764] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1859.991764] env[63371]: value = "task-1774939" [ 1859.991764] env[63371]: _type = "Task" [ 1859.991764] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.000452] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774939, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.182222] env[63371]: DEBUG nova.network.neutron [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance_info_cache with network_info: [{"id": "fdb2262d-54b0-4555-939f-39915c982e09", "address": "fa:16:3e:0d:42:9d", "network": {"id": "336d5cea-eac3-4fb8-b2f7-25482e238702", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-404152107-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9fb0da840f6847f19f03a1db8a1c3f4f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7e0240aa-a694-48fc-a0f9-6f2d3e71aa12", "external-id": "nsx-vlan-transportzone-249", "segmentation_id": 249, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfdb2262d-54", "ovs_interfaceid": "fdb2262d-54b0-4555-939f-39915c982e09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.186209] env[63371]: DEBUG nova.network.neutron [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Updating instance_info_cache with network_info: [{"id": "3856f78a-36e6-49ce-8a81-1e94a9c8f1cc", "address": "fa:16:3e:58:11:e1", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3856f78a-36", "ovs_interfaceid": "3856f78a-36e6-49ce-8a81-1e94a9c8f1cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.343536] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f4c43e-4c40-bb88-c9b0-a06e0fcdb161, 'name': 
SearchDatastore_Task, 'duration_secs': 0.011766} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.344372] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e04a7268-cf7d-4c8b-9436-168c944e75e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.349804] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1860.349804] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd2b93-b88d-c6c2-a94e-874c1e869560" [ 1860.349804] env[63371]: _type = "Task" [ 1860.349804] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.357664] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd2b93-b88d-c6c2-a94e-874c1e869560, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.433443] env[63371]: INFO nova.compute.manager [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Took 20.71 seconds to build instance. [ 1860.444055] env[63371]: DEBUG nova.network.neutron [req-b9d43a80-8321-4f5c-a892-85a85b331109 req-239c75e0-9cc0-4e70-963c-72dd828514be service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updated VIF entry in instance network info cache for port 39fe8c75-7aaa-42da-a231-9c68310ef7c8. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1860.444055] env[63371]: DEBUG nova.network.neutron [req-b9d43a80-8321-4f5c-a892-85a85b331109 req-239c75e0-9cc0-4e70-963c-72dd828514be service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updating instance_info_cache with network_info: [{"id": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "address": "fa:16:3e:7a:b8:42", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39fe8c75-7a", "ovs_interfaceid": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.502015] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774939, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06684} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.502311] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1860.503106] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff185f36-0ca4-4547-b876-f168fe1f42c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.529048] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 9885de9e-c640-4d82-a47a-980988d89deb/9885de9e-c640-4d82-a47a-980988d89deb.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1860.529697] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e36fcb34-6dd4-4ab3-aee6-b9044064045a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.549768] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1860.549768] env[63371]: value = "task-1774940" [ 1860.549768] env[63371]: _type = "Task" [ 1860.549768] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.558444] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774940, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.687239] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Releasing lock "refresh_cache-1689fc63-3c07-4517-bbef-0011d860e9fc" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.687239] env[63371]: DEBUG nova.objects.instance [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lazy-loading 'migration_context' on Instance uuid 1689fc63-3c07-4517-bbef-0011d860e9fc {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1860.688650] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "refresh_cache-cfa04c51-c077-4f16-ae57-e54d62aac044" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.688874] env[63371]: DEBUG nova.compute.manager [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Instance network_info: |[{"id": "3856f78a-36e6-49ce-8a81-1e94a9c8f1cc", "address": "fa:16:3e:58:11:e1", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3856f78a-36", "ovs_interfaceid": "3856f78a-36e6-49ce-8a81-1e94a9c8f1cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1860.689301] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:11:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '96d8be6c-b557-4b40-b0f5-838c62a3c904', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3856f78a-36e6-49ce-8a81-1e94a9c8f1cc', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1860.698083] env[63371]: DEBUG oslo.service.loopingcall [None req-784be244-5005-4292-a362-5cc95e389af3 
tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1860.701933] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1860.702183] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5862c06d-dfc2-4897-8d88-3894f879128c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.729486] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1860.729486] env[63371]: value = "task-1774941" [ 1860.729486] env[63371]: _type = "Task" [ 1860.729486] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.743780] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774941, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.859891] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cd2b93-b88d-c6c2-a94e-874c1e869560, 'name': SearchDatastore_Task, 'duration_secs': 0.016797} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.862606] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.862870] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd/3e2f17e7-8c9c-47c0-afb1-55e56eab74fd.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1860.863357] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ac9970e-1b66-403e-a19e-b1922c1e91a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.870269] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1860.870269] env[63371]: value = "task-1774942" [ 1860.870269] env[63371]: _type = "Task" [ 1860.870269] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.880625] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774942, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.888014] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f12e1b-65be-4502-9b77-4573435a3432 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.896220] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad34d2c-f72a-4e16-b04f-8d0b63ca02d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.926360] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef8ac22-0348-4151-80da-5280dcc12466 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.936018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d2cd142-e7d2-40eb-96fc-416ec3588d16 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.941104] env[63371]: DEBUG oslo_concurrency.lockutils [None req-726765a7-6841-43e4-8062-c6b6270307e0 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.229s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.951438] env[63371]: DEBUG oslo_concurrency.lockutils [req-b9d43a80-8321-4f5c-a892-85a85b331109 req-239c75e0-9cc0-4e70-963c-72dd828514be service nova] Releasing lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.951790] env[63371]: DEBUG nova.compute.provider_tree [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1861.060207] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774940, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.189119] env[63371]: DEBUG nova.objects.base [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Object Instance<1689fc63-3c07-4517-bbef-0011d860e9fc> lazy-loaded attributes: info_cache,migration_context {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1861.190093] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd98f39-6041-4479-97ee-85946875adfc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.209464] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-029c23c5-ffdf-42a9-826d-7d321f4165b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.215157] env[63371]: DEBUG oslo_vmware.api [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1861.215157] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52cfcd52-11d7-2739-b5dc-6724d82f8548" [ 1861.215157] env[63371]: _type = "Task" [ 1861.215157] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.222706] env[63371]: DEBUG oslo_vmware.api [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cfcd52-11d7-2739-b5dc-6724d82f8548, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.238260] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774941, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.380482] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774942, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.457901] env[63371]: DEBUG nova.scheduler.client.report [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1861.560811] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774940, 'name': ReconfigVM_Task, 'duration_secs': 0.594705} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.561132] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 9885de9e-c640-4d82-a47a-980988d89deb/9885de9e-c640-4d82-a47a-980988d89deb.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1861.562764] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'guest_format': None, 'encryption_format': None, 'device_name': '/dev/sda', 'disk_bus': None, 'encryption_options': None, 'encrypted': False, 'boot_index': 0, 'encryption_secret_uuid': None, 'device_type': 'disk', 'size': 0, 'image_id': '1aeb47a7-4e18-481d-b3c0-d33e8c7839d9'}], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'disk_bus': None, 'delete_on_termination': False, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368492', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'name': 'volume-74095c08-847f-4b4a-b107-0d7acbea84a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9885de9e-c640-4d82-a47a-980988d89deb', 'attached_at': '', 'detached_at': '', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'serial': '74095c08-847f-4b4a-b107-0d7acbea84a7'}, 'boot_index': None, 'device_type': None, 'attachment_id': 'fcddcd9c-5113-4de4-8872-4491f89eb919', 'mount_device': '/dev/sdb', 'volume_type': None}], 'swap': None} {{(pid=63371) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1861.562965] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Volume attach. 
Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1861.563213] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368492', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'name': 'volume-74095c08-847f-4b4a-b107-0d7acbea84a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9885de9e-c640-4d82-a47a-980988d89deb', 'attached_at': '', 'detached_at': '', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'serial': '74095c08-847f-4b4a-b107-0d7acbea84a7'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1861.564038] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e1810c-3fbf-4498-b844-c0e35ef2f6ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.584733] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8f03ff-7502-4348-a31e-27a5e6aba930 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.604103] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f938228b-4020-4c58-8d52-32112e3dd77f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.604393] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f938228b-4020-4c58-8d52-32112e3dd77f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1861.604580] env[63371]: DEBUG nova.compute.manager [None req-f938228b-4020-4c58-8d52-32112e3dd77f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1861.614378] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] volume-74095c08-847f-4b4a-b107-0d7acbea84a7/volume-74095c08-847f-4b4a-b107-0d7acbea84a7.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1861.615203] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-193ae1ba-243a-4864-afb2-79652165d51f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.618375] env[63371]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f311b02f-25ae-46cc-befe-9ce145e1fb18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.636297] env[63371]: DEBUG nova.compute.manager [None req-f938228b-4020-4c58-8d52-32112e3dd77f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63371) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 1861.636745] env[63371]: DEBUG nova.objects.instance [None req-f938228b-4020-4c58-8d52-32112e3dd77f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lazy-loading 'flavor' on Instance uuid a083adca-0638-4a39-bd4c-30c64d1c9b0e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1861.640648] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1861.640648] env[63371]: value = "task-1774943" [ 1861.640648] env[63371]: _type = "Task" [ 1861.640648] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.651396] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774943, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.691067] env[63371]: DEBUG nova.compute.manager [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Received event network-changed-2c2ab976-7609-4012-a826-68288c4f7f64 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1861.691067] env[63371]: DEBUG nova.compute.manager [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Refreshing instance network info cache due to event network-changed-2c2ab976-7609-4012-a826-68288c4f7f64. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1861.691067] env[63371]: DEBUG oslo_concurrency.lockutils [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] Acquiring lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1861.691067] env[63371]: DEBUG oslo_concurrency.lockutils [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] Acquired lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1861.691067] env[63371]: DEBUG nova.network.neutron [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Refreshing network info cache for port 2c2ab976-7609-4012-a826-68288c4f7f64 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1861.727427] env[63371]: DEBUG oslo_vmware.api [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52cfcd52-11d7-2739-b5dc-6724d82f8548, 'name': SearchDatastore_Task, 'duration_secs': 0.01057} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.728051] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1861.744742] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774941, 'name': CreateVM_Task, 'duration_secs': 0.882789} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.744742] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1861.744742] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1861.744742] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1861.744742] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1861.744742] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bced840-adfb-4db2-ace4-cc5c5c5a1e1f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.748348] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1861.748348] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520c6c8e-17d5-506a-4255-12a1cfd6c723" [ 1861.748348] env[63371]: _type = "Task" [ 1861.748348] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.757386] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520c6c8e-17d5-506a-4255-12a1cfd6c723, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.881305] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774942, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.882578} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.881649] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd/3e2f17e7-8c9c-47c0-afb1-55e56eab74fd.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1861.881792] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1861.882056] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-985d1b87-4885-4e85-bea1-3313e9e41a86 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.888630] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1861.888630] env[63371]: value = "task-1774944" [ 1861.888630] env[63371]: _type = "Task" [ 1861.888630] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.898055] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774944, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.963124] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.342s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1861.963739] env[63371]: DEBUG nova.compute.manager [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1861.966395] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.238s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1862.146120] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f938228b-4020-4c58-8d52-32112e3dd77f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1862.146641] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0dd1ad5c-6c34-4a7d-92eb-ea8ede8014ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.153724] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774943, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.155025] env[63371]: DEBUG oslo_vmware.api [None req-f938228b-4020-4c58-8d52-32112e3dd77f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1862.155025] env[63371]: value = "task-1774945" [ 1862.155025] env[63371]: _type = "Task" [ 1862.155025] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.164850] env[63371]: DEBUG oslo_vmware.api [None req-f938228b-4020-4c58-8d52-32112e3dd77f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774945, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.258914] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520c6c8e-17d5-506a-4255-12a1cfd6c723, 'name': SearchDatastore_Task, 'duration_secs': 0.013915} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.259329] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.259598] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1862.259872] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.260042] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.260232] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1862.262777] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c6b2c5b-986a-4534-ab96-aa7a14ec7455 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.274271] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1862.274464] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1862.275552] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-384a22f6-7ce8-4309-b71e-3bd3406063ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.284346] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1862.284346] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52867edd-e324-4ad1-c109-20b55203b945" [ 1862.284346] env[63371]: _type = "Task" [ 1862.284346] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.291843] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52867edd-e324-4ad1-c109-20b55203b945, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.397791] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774944, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073271} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.398638] env[63371]: DEBUG nova.network.neutron [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updated VIF entry in instance network info cache for port 2c2ab976-7609-4012-a826-68288c4f7f64. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1862.398960] env[63371]: DEBUG nova.network.neutron [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updating instance_info_cache with network_info: [{"id": "2c2ab976-7609-4012-a826-68288c4f7f64", "address": "fa:16:3e:c1:5a:c9", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c2ab976-76", "ovs_interfaceid": "2c2ab976-7609-4012-a826-68288c4f7f64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.400349] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1862.401253] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c7be510-df80-4b98-8da8-5c70c88dfe03 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.423428] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd/3e2f17e7-8c9c-47c0-afb1-55e56eab74fd.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1862.424228] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56a1c059-eb4d-4db0-bd18-8e6896d634c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.444603] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1862.444603] env[63371]: value = "task-1774946" [ 1862.444603] env[63371]: _type = "Task" [ 1862.444603] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.452943] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774946, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.469979] env[63371]: DEBUG nova.compute.utils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1862.474575] env[63371]: DEBUG nova.compute.manager [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1862.474749] env[63371]: DEBUG nova.network.neutron [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1862.519404] env[63371]: DEBUG nova.policy [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09b4f1693ef54996899c199362970fe3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '026682964c784968a24e654531c14aa9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1862.653469] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774943, 'name': ReconfigVM_Task, 'duration_secs': 0.566449} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.653772] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Reconfigured VM instance instance-00000067 to attach disk [datastore1] volume-74095c08-847f-4b4a-b107-0d7acbea84a7/volume-74095c08-847f-4b4a-b107-0d7acbea84a7.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1862.660816] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efab87eb-4b53-4e6f-ad8f-31ffa48a0e7d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.678683] env[63371]: DEBUG oslo_vmware.api [None req-f938228b-4020-4c58-8d52-32112e3dd77f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774945, 'name': PowerOffVM_Task, 'duration_secs': 0.195211} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.679925] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f938228b-4020-4c58-8d52-32112e3dd77f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1862.680094] env[63371]: DEBUG nova.compute.manager [None req-f938228b-4020-4c58-8d52-32112e3dd77f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1862.680423] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1862.680423] env[63371]: value = "task-1774947" [ 1862.680423] env[63371]: _type = "Task" [ 1862.680423] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.683271] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7896d484-7487-4153-8752-fe27b1c7f535 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.698940] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774947, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.699693] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb2771c7-8feb-4b39-a324-215247e82989 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.706993] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e556f77e-57d2-4557-abfc-7756b193a117 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.739394] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b6da22-419e-4a83-adf5-1944c12eab49 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.751029] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4fd754-d894-4244-81e7-8337626aa6cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.762884] env[63371]: DEBUG nova.compute.provider_tree [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1862.775621] env[63371]: DEBUG nova.network.neutron [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Successfully created port: 28f8575e-6c81-45a8-9471-fcd0e5f8ae9a {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1862.794776] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52867edd-e324-4ad1-c109-20b55203b945, 'name': SearchDatastore_Task, 'duration_secs': 0.028329} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.795685] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c86fe9b-105b-4add-84f7-a46fad440536 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.801875] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1862.801875] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f65c12-c111-0a0e-a7cf-85f8372e20a2" [ 1862.801875] env[63371]: _type = "Task" [ 1862.801875] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.810992] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f65c12-c111-0a0e-a7cf-85f8372e20a2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.902234] env[63371]: DEBUG oslo_concurrency.lockutils [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] Releasing lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.902478] env[63371]: DEBUG nova.compute.manager [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Received event network-changed-2c2ab976-7609-4012-a826-68288c4f7f64 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1862.902680] env[63371]: DEBUG nova.compute.manager [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Refreshing instance network info cache due to event network-changed-2c2ab976-7609-4012-a826-68288c4f7f64. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1862.902989] env[63371]: DEBUG oslo_concurrency.lockutils [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] Acquiring lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1862.903232] env[63371]: DEBUG oslo_concurrency.lockutils [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] Acquired lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1862.903501] env[63371]: DEBUG nova.network.neutron [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Refreshing network info cache for port 2c2ab976-7609-4012-a826-68288c4f7f64 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1862.954523] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774946, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.975391] env[63371]: DEBUG nova.compute.manager [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1863.195074] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774947, 'name': ReconfigVM_Task, 'duration_secs': 0.398189} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.195390] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368492', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'name': 'volume-74095c08-847f-4b4a-b107-0d7acbea84a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9885de9e-c640-4d82-a47a-980988d89deb', 'attached_at': '', 'detached_at': '', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'serial': '74095c08-847f-4b4a-b107-0d7acbea84a7'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1863.195962] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ec14dfbf-28c7-4fe7-b7fc-1fa94f2d706e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.202124] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1863.202124] env[63371]: value = "task-1774948" [ 1863.202124] env[63371]: _type = "Task" [ 1863.202124] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.207159] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f938228b-4020-4c58-8d52-32112e3dd77f tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.603s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1863.217525] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774948, 'name': Rename_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.285855] env[63371]: ERROR nova.scheduler.client.report [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [req-2c7d3376-1e4b-4259-ac33-0dae2a1fbff5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2c7d3376-1e4b-4259-ac33-0dae2a1fbff5"}]} [ 1863.308663] env[63371]: DEBUG nova.scheduler.client.report [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1863.316365] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f65c12-c111-0a0e-a7cf-85f8372e20a2, 'name': SearchDatastore_Task, 'duration_secs': 0.009885} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.317034] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.317034] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cfa04c51-c077-4f16-ae57-e54d62aac044/cfa04c51-c077-4f16-ae57-e54d62aac044.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1863.317210] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0770fa7-d00f-4d3e-ba74-64ede0e69708 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.323033] env[63371]: DEBUG nova.scheduler.client.report [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1863.323252] env[63371]: DEBUG nova.compute.provider_tree [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1863.327267] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1863.327267] env[63371]: value = "task-1774949" [ 1863.327267] env[63371]: _type = "Task" [ 1863.327267] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.335144] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774949, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.337167] env[63371]: DEBUG nova.scheduler.client.report [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1863.357282] env[63371]: DEBUG nova.scheduler.client.report [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1863.460933] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774946, 'name': ReconfigVM_Task, 'duration_secs': 0.830843} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.461312] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd/3e2f17e7-8c9c-47c0-afb1-55e56eab74fd.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1863.462025] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-86953be1-786b-4c6e-af64-29075dec5e52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.471348] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1863.471348] env[63371]: value = "task-1774950" [ 1863.471348] env[63371]: _type = "Task" [ 1863.471348] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.484935] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774950, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.611622] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a0732d6-acf6-4b5d-a1ff-a39c54629f21 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.619992] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf16e9f-157a-4ef8-bb9f-40b92f15b89b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.654040] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb303c0-44c5-4bc6-8a6b-25eabf2331d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.663882] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078a47b6-2bbe-42c8-9632-2ae109b21879 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.677912] env[63371]: DEBUG nova.compute.provider_tree [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1863.717119] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774948, 'name': Rename_Task, 'duration_secs': 0.205874} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.717119] env[63371]: DEBUG nova.network.neutron [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updated VIF entry in instance network info cache for port 2c2ab976-7609-4012-a826-68288c4f7f64. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1863.717658] env[63371]: DEBUG nova.network.neutron [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updating instance_info_cache with network_info: [{"id": "2c2ab976-7609-4012-a826-68288c4f7f64", "address": "fa:16:3e:c1:5a:c9", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c2ab976-76", "ovs_interfaceid": "2c2ab976-7609-4012-a826-68288c4f7f64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.719491] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1863.722026] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76c2bbd7-76ae-48cb-91c8-67cef4641c0d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.727825] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1863.727825] env[63371]: value = "task-1774951" [ 1863.727825] env[63371]: _type = "Task" [ 1863.727825] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.739741] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774951, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.837187] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774949, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503278} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.837466] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] cfa04c51-c077-4f16-ae57-e54d62aac044/cfa04c51-c077-4f16-ae57-e54d62aac044.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1863.837674] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1863.837932] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1dc4839-49f4-4724-a4f4-e78a96965fd0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.845408] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1863.845408] env[63371]: value = "task-1774952" [ 1863.845408] env[63371]: _type = "Task" [ 1863.845408] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.854277] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774952, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.981407] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774950, 'name': Rename_Task, 'duration_secs': 0.207412} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.981723] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1863.981973] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f4008c1-8557-4e0a-ad8b-ba2e40a87f4b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.986727] env[63371]: DEBUG nova.compute.manager [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1863.989898] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1863.989898] env[63371]: value = "task-1774953" [ 1863.989898] env[63371]: _type = "Task" [ 1863.989898] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.998493] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774953, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.012544] env[63371]: DEBUG nova.virt.hardware [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1864.012783] env[63371]: DEBUG nova.virt.hardware [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1864.012933] env[63371]: DEBUG nova.virt.hardware [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1864.013525] env[63371]: DEBUG nova.virt.hardware [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1864.013525] env[63371]: DEBUG nova.virt.hardware [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1864.013679] env[63371]: DEBUG nova.virt.hardware [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Chose sockets=0, cores=0, threads=0; limits 
were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1864.013887] env[63371]: DEBUG nova.virt.hardware [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1864.014087] env[63371]: DEBUG nova.virt.hardware [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1864.014493] env[63371]: DEBUG nova.virt.hardware [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1864.014683] env[63371]: DEBUG nova.virt.hardware [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1864.014860] env[63371]: DEBUG nova.virt.hardware [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1864.015716] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e3e961-cfcf-4dac-ab04-a4d9318ed5df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.023095] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f281def-446b-49a9-8a4b-fa87b729aabf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.209139] env[63371]: DEBUG nova.scheduler.client.report [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 165 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1864.209329] env[63371]: DEBUG nova.compute.provider_tree [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updating resource provider 
c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 165 to 166 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1864.209514] env[63371]: DEBUG nova.compute.provider_tree [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1864.220943] env[63371]: DEBUG oslo_concurrency.lockutils [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] Releasing lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.221241] env[63371]: DEBUG nova.compute.manager [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Received event network-changed-39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1864.221739] env[63371]: DEBUG nova.compute.manager [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Refreshing instance network info cache due to event network-changed-39fe8c75-7aaa-42da-a231-9c68310ef7c8. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1864.221739] env[63371]: DEBUG oslo_concurrency.lockutils [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] Acquiring lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1864.221941] env[63371]: DEBUG oslo_concurrency.lockutils [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] Acquired lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1864.222209] env[63371]: DEBUG nova.network.neutron [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Refreshing network info cache for port 39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1864.238418] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774951, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.355929] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774952, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10861} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.357034] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1864.358203] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1eeb422-596a-42a4-a272-199723f56ca4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.362901] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.363147] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.363374] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.363572] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.363736] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.386213] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 
tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] cfa04c51-c077-4f16-ae57-e54d62aac044/cfa04c51-c077-4f16-ae57-e54d62aac044.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1864.386828] env[63371]: INFO nova.compute.manager [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Terminating instance [ 1864.388670] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b784cb1-1e82-47ac-ba59-6a0bdb302707 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.406811] env[63371]: DEBUG nova.compute.manager [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1864.406923] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1864.407655] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376fa09c-46f5-4f22-b3ff-98ccdcfbb4ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.413659] env[63371]: DEBUG nova.compute.manager [req-346c2891-fe90-4696-b6ab-292fe6790e77 req-f9ea5c93-5be3-4c5e-839c-5f6f60fe19fb service nova] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Received event network-vif-plugged-28f8575e-6c81-45a8-9471-fcd0e5f8ae9a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1864.413993] env[63371]: DEBUG oslo_concurrency.lockutils [req-346c2891-fe90-4696-b6ab-292fe6790e77 req-f9ea5c93-5be3-4c5e-839c-5f6f60fe19fb service nova] Acquiring lock "fac8df06-ab04-41ec-a32b-f46a08470a97-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.414259] env[63371]: DEBUG oslo_concurrency.lockutils [req-346c2891-fe90-4696-b6ab-292fe6790e77 req-f9ea5c93-5be3-4c5e-839c-5f6f60fe19fb service nova] Lock "fac8df06-ab04-41ec-a32b-f46a08470a97-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1864.414408] env[63371]: DEBUG oslo_concurrency.lockutils [req-346c2891-fe90-4696-b6ab-292fe6790e77 req-f9ea5c93-5be3-4c5e-839c-5f6f60fe19fb service nova] Lock "fac8df06-ab04-41ec-a32b-f46a08470a97-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.414595] env[63371]: DEBUG nova.compute.manager [req-346c2891-fe90-4696-b6ab-292fe6790e77 
req-f9ea5c93-5be3-4c5e-839c-5f6f60fe19fb service nova] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] No waiting events found dispatching network-vif-plugged-28f8575e-6c81-45a8-9471-fcd0e5f8ae9a {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1864.414765] env[63371]: WARNING nova.compute.manager [req-346c2891-fe90-4696-b6ab-292fe6790e77 req-f9ea5c93-5be3-4c5e-839c-5f6f60fe19fb service nova] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Received unexpected event network-vif-plugged-28f8575e-6c81-45a8-9471-fcd0e5f8ae9a for instance with vm_state building and task_state spawning. [ 1864.418339] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1864.418339] env[63371]: value = "task-1774954" [ 1864.418339] env[63371]: _type = "Task" [ 1864.418339] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.420470] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1864.423385] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f21ef083-d72e-4305-bb83-b97741d13582 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.433232] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774954, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.500351] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774953, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.501485] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1864.501679] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1864.501855] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleting the datastore file [datastore1] a083adca-0638-4a39-bd4c-30c64d1c9b0e {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1864.502112] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-496d08f5-511a-461e-b452-f305697628d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.506600] env[63371]: DEBUG nova.network.neutron [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Successfully updated port: 28f8575e-6c81-45a8-9471-fcd0e5f8ae9a {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1864.512020] env[63371]: DEBUG oslo_vmware.api [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1864.512020] env[63371]: value = "task-1774956" [ 1864.512020] env[63371]: _type = "Task" [ 1864.512020] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.517043] env[63371]: DEBUG oslo_vmware.api [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774956, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.739716] env[63371]: DEBUG oslo_vmware.api [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1774951, 'name': PowerOnVM_Task, 'duration_secs': 0.968435} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.740613] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1864.740848] env[63371]: DEBUG nova.compute.manager [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1864.741655] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34b7f5db-05d1-49da-a241-eef2089dcef8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.932761] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774954, 'name': ReconfigVM_Task, 'duration_secs': 0.332872} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.933502] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Reconfigured VM instance instance-00000073 to attach disk [datastore1] cfa04c51-c077-4f16-ae57-e54d62aac044/cfa04c51-c077-4f16-ae57-e54d62aac044.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1864.934161] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0d8938e-65f5-48fe-813a-5478b9e4e154 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.941901] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1864.941901] env[63371]: value = "task-1774957" [ 1864.941901] env[63371]: _type = "Task" [ 1864.941901] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.950963] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774957, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.972094] env[63371]: DEBUG nova.network.neutron [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updated VIF entry in instance network info cache for port 39fe8c75-7aaa-42da-a231-9c68310ef7c8. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1864.972850] env[63371]: DEBUG nova.network.neutron [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updating instance_info_cache with network_info: [{"id": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "address": "fa:16:3e:7a:b8:42", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39fe8c75-7a", "ovs_interfaceid": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.001048] env[63371]: DEBUG oslo_vmware.api [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1774953, 'name': PowerOnVM_Task, 'duration_secs': 0.870333} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.002041] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1865.002041] env[63371]: DEBUG nova.compute.manager [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1865.002363] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe406125-35bf-4c3c-9ded-5516db98ff35 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.008980] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "refresh_cache-fac8df06-ab04-41ec-a32b-f46a08470a97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.009129] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquired lock "refresh_cache-fac8df06-ab04-41ec-a32b-f46a08470a97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.009266] env[63371]: DEBUG nova.network.neutron [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1865.020974] env[63371]: DEBUG oslo_vmware.api [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774956, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167643} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.021159] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1865.021226] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1865.021792] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1865.021792] env[63371]: INFO nova.compute.manager [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Took 0.61 seconds to destroy the instance on the hypervisor. [ 1865.021792] env[63371]: DEBUG oslo.service.loopingcall [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1865.021980] env[63371]: DEBUG nova.compute.manager [-] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1865.022036] env[63371]: DEBUG nova.network.neutron [-] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1865.220778] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.254s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.260999] env[63371]: DEBUG oslo_concurrency.lockutils [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.261293] env[63371]: DEBUG oslo_concurrency.lockutils [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.261534] env[63371]: DEBUG nova.objects.instance [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63371) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1865.452424] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774957, 'name': Rename_Task, 'duration_secs': 0.139796} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.452701] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1865.452942] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ba1ac31-468c-4fc5-b68d-ee9369ab8fb1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.459224] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1865.459224] env[63371]: value = "task-1774958" [ 1865.459224] env[63371]: _type = "Task" [ 1865.459224] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.466414] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774958, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.475022] env[63371]: DEBUG oslo_concurrency.lockutils [req-6100840b-37d2-47e0-8349-d410d6baec47 req-f985f5cc-57aa-40b5-a14e-233353a5f4d5 service nova] Releasing lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.522946] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.542484] env[63371]: DEBUG nova.network.neutron [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1865.672533] env[63371]: DEBUG nova.network.neutron [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Updating instance_info_cache with network_info: [{"id": "28f8575e-6c81-45a8-9471-fcd0e5f8ae9a", "address": "fa:16:3e:3d:1f:c2", "network": {"id": "37c74e18-5c2a-4df3-b429-8a4fb9f29cc0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1560662466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "026682964c784968a24e654531c14aa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28f8575e-6c", "ovs_interfaceid": "28f8575e-6c81-45a8-9471-fcd0e5f8ae9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.769093] env[63371]: DEBUG nova.network.neutron [-] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.777697] env[63371]: INFO nova.scheduler.client.report [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted allocation for migration 67d1974a-6979-4f3e-959c-350c3b20701a [ 1865.970028] env[63371]: DEBUG oslo_vmware.api [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1774958, 'name': PowerOnVM_Task, 'duration_secs': 0.474126} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1865.970028] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1865.970269] env[63371]: INFO nova.compute.manager [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Took 9.40 seconds to spawn the instance on the hypervisor. 
[ 1865.970269] env[63371]: DEBUG nova.compute.manager [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1865.971059] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-215c1a34-e1fb-45fa-8528-86ab78a0dd8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.175348] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Releasing lock "refresh_cache-fac8df06-ab04-41ec-a32b-f46a08470a97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.175735] env[63371]: DEBUG nova.compute.manager [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Instance network_info: |[{"id": "28f8575e-6c81-45a8-9471-fcd0e5f8ae9a", "address": "fa:16:3e:3d:1f:c2", "network": {"id": "37c74e18-5c2a-4df3-b429-8a4fb9f29cc0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1560662466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "026682964c784968a24e654531c14aa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28f8575e-6c", "ovs_interfaceid": "28f8575e-6c81-45a8-9471-fcd0e5f8ae9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1866.176217] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:1f:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1e7a4976-597e-4636-990e-6062b5faadee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '28f8575e-6c81-45a8-9471-fcd0e5f8ae9a', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1866.184211] env[63371]: DEBUG oslo.service.loopingcall [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1866.184448] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1866.184679] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-53ae54f6-7ac1-4450-8011-b397ce3e9b00 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.204262] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1866.204262] env[63371]: value = "task-1774959" [ 1866.204262] env[63371]: _type = "Task" [ 1866.204262] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.211535] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774959, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.269054] env[63371]: DEBUG oslo_concurrency.lockutils [None req-82016377-5194-4ef4-9349-8ac179e83736 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.270204] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.747s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.270420] env[63371]: DEBUG nova.objects.instance [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63371) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1866.273188] env[63371]: INFO nova.compute.manager [-] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Took 1.25 seconds to deallocate network for instance. [ 1866.285108] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.989s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.491036] env[63371]: INFO nova.compute.manager [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Took 21.83 seconds to build instance. 
[ 1866.566471] env[63371]: DEBUG nova.compute.manager [req-080aa146-fa03-4a69-ac8a-1a23a9bb232b req-fd3a97c6-a739-4e82-8247-75fca627186d service nova] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Received event network-changed-28f8575e-6c81-45a8-9471-fcd0e5f8ae9a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1866.566696] env[63371]: DEBUG nova.compute.manager [req-080aa146-fa03-4a69-ac8a-1a23a9bb232b req-fd3a97c6-a739-4e82-8247-75fca627186d service nova] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Refreshing instance network info cache due to event network-changed-28f8575e-6c81-45a8-9471-fcd0e5f8ae9a. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1866.567352] env[63371]: DEBUG oslo_concurrency.lockutils [req-080aa146-fa03-4a69-ac8a-1a23a9bb232b req-fd3a97c6-a739-4e82-8247-75fca627186d service nova] Acquiring lock "refresh_cache-fac8df06-ab04-41ec-a32b-f46a08470a97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.567607] env[63371]: DEBUG oslo_concurrency.lockutils [req-080aa146-fa03-4a69-ac8a-1a23a9bb232b req-fd3a97c6-a739-4e82-8247-75fca627186d service nova] Acquired lock "refresh_cache-fac8df06-ab04-41ec-a32b-f46a08470a97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.567799] env[63371]: DEBUG nova.network.neutron [req-080aa146-fa03-4a69-ac8a-1a23a9bb232b req-fd3a97c6-a739-4e82-8247-75fca627186d service nova] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Refreshing network info cache for port 28f8575e-6c81-45a8-9471-fcd0e5f8ae9a {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1866.714047] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774959, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.780529] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.993214] env[63371]: DEBUG oslo_concurrency.lockutils [None req-784be244-5005-4292-a362-5cc95e389af3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.348s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.143939] env[63371]: DEBUG nova.compute.manager [req-a5d53355-2795-4c78-81f2-3789059cf090 req-b5a7e9aa-8e48-44ae-a9ab-d5620e513cae service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Received event network-changed-3856f78a-36e6-49ce-8a81-1e94a9c8f1cc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1867.144044] env[63371]: DEBUG nova.compute.manager [req-a5d53355-2795-4c78-81f2-3789059cf090 req-b5a7e9aa-8e48-44ae-a9ab-d5620e513cae service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Refreshing instance network info cache due to event network-changed-3856f78a-36e6-49ce-8a81-1e94a9c8f1cc. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1867.144270] env[63371]: DEBUG oslo_concurrency.lockutils [req-a5d53355-2795-4c78-81f2-3789059cf090 req-b5a7e9aa-8e48-44ae-a9ab-d5620e513cae service nova] Acquiring lock "refresh_cache-cfa04c51-c077-4f16-ae57-e54d62aac044" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1867.144449] env[63371]: DEBUG oslo_concurrency.lockutils [req-a5d53355-2795-4c78-81f2-3789059cf090 req-b5a7e9aa-8e48-44ae-a9ab-d5620e513cae service nova] Acquired lock "refresh_cache-cfa04c51-c077-4f16-ae57-e54d62aac044" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1867.144560] env[63371]: DEBUG nova.network.neutron [req-a5d53355-2795-4c78-81f2-3789059cf090 req-b5a7e9aa-8e48-44ae-a9ab-d5620e513cae service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Refreshing network info cache for port 3856f78a-36e6-49ce-8a81-1e94a9c8f1cc {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1867.214909] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774959, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.265966] env[63371]: DEBUG nova.network.neutron [req-080aa146-fa03-4a69-ac8a-1a23a9bb232b req-fd3a97c6-a739-4e82-8247-75fca627186d service nova] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Updated VIF entry in instance network info cache for port 28f8575e-6c81-45a8-9471-fcd0e5f8ae9a. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1867.266491] env[63371]: DEBUG nova.network.neutron [req-080aa146-fa03-4a69-ac8a-1a23a9bb232b req-fd3a97c6-a739-4e82-8247-75fca627186d service nova] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Updating instance_info_cache with network_info: [{"id": "28f8575e-6c81-45a8-9471-fcd0e5f8ae9a", "address": "fa:16:3e:3d:1f:c2", "network": {"id": "37c74e18-5c2a-4df3-b429-8a4fb9f29cc0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1560662466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "026682964c784968a24e654531c14aa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap28f8575e-6c", "ovs_interfaceid": "28f8575e-6c81-45a8-9471-fcd0e5f8ae9a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1867.279945] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1ac8a10-401e-4325-9c78-f2decb6f9854 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: 
held 1.010s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.281831] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.501s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.282192] env[63371]: DEBUG nova.objects.instance [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lazy-loading 'resources' on Instance uuid a083adca-0638-4a39-bd4c-30c64d1c9b0e {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1867.319678] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "1689fc63-3c07-4517-bbef-0011d860e9fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.319972] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.320233] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "1689fc63-3c07-4517-bbef-0011d860e9fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1867.320441] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1867.320602] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1867.322691] env[63371]: INFO nova.compute.manager [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Terminating instance [ 1867.324503] env[63371]: DEBUG nova.compute.manager [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 
tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1867.324726] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1867.325572] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7cfbdc9-00ee-4908-af6f-2bfda0c704f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.333963] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1867.334241] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aecf9334-d407-4887-8cb6-7c3cec4858f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.340567] env[63371]: DEBUG oslo_vmware.api [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1867.340567] env[63371]: value = "task-1774960" [ 1867.340567] env[63371]: _type = "Task" [ 1867.340567] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.348678] env[63371]: DEBUG oslo_vmware.api [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774960, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.715685] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774959, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.769232] env[63371]: DEBUG oslo_concurrency.lockutils [req-080aa146-fa03-4a69-ac8a-1a23a9bb232b req-fd3a97c6-a739-4e82-8247-75fca627186d service nova] Releasing lock "refresh_cache-fac8df06-ab04-41ec-a32b-f46a08470a97" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.769550] env[63371]: DEBUG nova.compute.manager [req-080aa146-fa03-4a69-ac8a-1a23a9bb232b req-fd3a97c6-a739-4e82-8247-75fca627186d service nova] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Received event network-vif-deleted-0f111dbf-ca3c-4a52-8de4-ece209ccf945 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1867.852925] env[63371]: DEBUG oslo_vmware.api [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774960, 'name': PowerOffVM_Task, 'duration_secs': 0.340191} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.856981] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1867.857231] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1867.857703] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-76d92260-5a5d-4bdc-8bc3-a5f850f2e5df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.886481] env[63371]: DEBUG nova.network.neutron [req-a5d53355-2795-4c78-81f2-3789059cf090 req-b5a7e9aa-8e48-44ae-a9ab-d5620e513cae service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Updated VIF entry in instance network info cache for port 3856f78a-36e6-49ce-8a81-1e94a9c8f1cc. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1867.886953] env[63371]: DEBUG nova.network.neutron [req-a5d53355-2795-4c78-81f2-3789059cf090 req-b5a7e9aa-8e48-44ae-a9ab-d5620e513cae service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Updating instance_info_cache with network_info: [{"id": "3856f78a-36e6-49ce-8a81-1e94a9c8f1cc", "address": "fa:16:3e:58:11:e1", "network": {"id": "65f8b2cb-f2c8-4fa7-9c31-622644d3eb60", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1211308086-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.177", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35882164a8734563a006675f2ec6ba71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "96d8be6c-b557-4b40-b0f5-838c62a3c904", "external-id": "nsx-vlan-transportzone-144", "segmentation_id": 144, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3856f78a-36", "ovs_interfaceid": "3856f78a-36e6-49ce-8a81-1e94a9c8f1cc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.008534] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07cc4f67-8ced-4bb9-9ecd-6e409822fcec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.016091] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7b3a49-a8da-4e60-9022-39aac3212749 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.047344] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ece66b7-4c78-4c81-aa00-056d9fd48085 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.054904] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c937254f-4a37-4e14-96c3-814e62c94f94 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.068590] env[63371]: DEBUG nova.compute.provider_tree [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1868.216390] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774959, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.294936] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1868.295267] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1868.295499] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleting the datastore file [datastore1] 1689fc63-3c07-4517-bbef-0011d860e9fc {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1868.295838] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96470132-f90c-4729-a85b-e34ece6c455e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.303044] env[63371]: DEBUG oslo_vmware.api [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for the task: (returnval){ [ 1868.303044] env[63371]: value = "task-1774962" [ 1868.303044] env[63371]: _type = "Task" [ 1868.303044] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.310169] env[63371]: DEBUG oslo_vmware.api [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774962, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.390053] env[63371]: DEBUG oslo_concurrency.lockutils [req-a5d53355-2795-4c78-81f2-3789059cf090 req-b5a7e9aa-8e48-44ae-a9ab-d5620e513cae service nova] Releasing lock "refresh_cache-cfa04c51-c077-4f16-ae57-e54d62aac044" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.572059] env[63371]: DEBUG nova.scheduler.client.report [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1868.717662] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1774959, 'name': CreateVM_Task, 'duration_secs': 2.10262} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.718193] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1868.718900] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1868.719076] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1868.719419] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1868.719650] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d7d33e9-d63c-4d20-9bdb-0bc0ad1331f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.725053] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1868.725053] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]527b9889-25d7-1a84-7382-b7b633a77c7e" [ 1868.725053] env[63371]: _type = "Task" [ 1868.725053] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.732487] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527b9889-25d7-1a84-7382-b7b633a77c7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.811968] env[63371]: DEBUG oslo_vmware.api [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Task: {'id': task-1774962, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.432989} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.812261] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1868.812450] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1868.812625] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1868.812794] env[63371]: INFO nova.compute.manager [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Took 1.49 seconds to destroy the instance on the hypervisor. [ 1868.813054] env[63371]: DEBUG oslo.service.loopingcall [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1868.813248] env[63371]: DEBUG nova.compute.manager [-] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1868.813404] env[63371]: DEBUG nova.network.neutron [-] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1869.077425] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.796s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.092730] env[63371]: DEBUG nova.compute.manager [req-cfb41525-9eb7-45e4-a5ca-fdd4e797d8b7 req-63140d10-5fe1-4e9e-970b-b19246de2c90 service nova] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Received event network-vif-deleted-fdb2262d-54b0-4555-939f-39915c982e09 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1869.092906] env[63371]: INFO nova.compute.manager [req-cfb41525-9eb7-45e4-a5ca-fdd4e797d8b7 req-63140d10-5fe1-4e9e-970b-b19246de2c90 service nova] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Neutron deleted interface fdb2262d-54b0-4555-939f-39915c982e09; detaching it from the instance and deleting it from the info cache [ 1869.092981] env[63371]: DEBUG nova.network.neutron [req-cfb41525-9eb7-45e4-a5ca-fdd4e797d8b7 req-63140d10-5fe1-4e9e-970b-b19246de2c90 service nova] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.100125] env[63371]: INFO nova.scheduler.client.report [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted allocations for instance a083adca-0638-4a39-bd4c-30c64d1c9b0e [ 1869.234570] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]527b9889-25d7-1a84-7382-b7b633a77c7e, 'name': SearchDatastore_Task, 'duration_secs': 0.038562} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.234904] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1869.235121] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1869.235363] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1869.235513] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1869.235694] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1869.235958] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c29c98fc-cd68-43c0-8fee-a9a205092997 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.247266] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1869.247452] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1869.248155] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-282278d3-7456-4401-bec7-481e424a0356 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.253384] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1869.253384] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52463f0b-5b1a-f8c2-af96-1ef7f1a2ff50" [ 1869.253384] env[63371]: _type = "Task" [ 1869.253384] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.260874] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52463f0b-5b1a-f8c2-af96-1ef7f1a2ff50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.564375] env[63371]: DEBUG nova.network.neutron [-] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.595257] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93ef3c54-f507-47fd-a8b9-031d6bf20eed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.605269] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74ea2dc-3fe6-4985-a69e-883a35c555e4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.618636] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02d35a92-c5ac-4546-b447-c11eb3464626 tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "a083adca-0638-4a39-bd4c-30c64d1c9b0e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.255s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1869.642393] env[63371]: DEBUG nova.compute.manager [req-cfb41525-9eb7-45e4-a5ca-fdd4e797d8b7 req-63140d10-5fe1-4e9e-970b-b19246de2c90 service nova] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Detach interface failed, port_id=fdb2262d-54b0-4555-939f-39915c982e09, reason: Instance 1689fc63-3c07-4517-bbef-0011d860e9fc could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1869.645226] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.645437] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1869.764283] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52463f0b-5b1a-f8c2-af96-1ef7f1a2ff50, 'name': SearchDatastore_Task, 'duration_secs': 0.044907} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.765166] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6c68dbbf-ee24-4451-adfc-3ef3b94b4591 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.770878] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1869.770878] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52e7bffc-28e3-c044-0877-2b3171327e79" [ 1869.770878] env[63371]: _type = "Task" [ 1869.770878] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.779204] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e7bffc-28e3-c044-0877-2b3171327e79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.067421] env[63371]: INFO nova.compute.manager [-] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Took 1.25 seconds to deallocate network for instance. 
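
The lockutils entries in this stretch all follow one tracing pattern: a lock is "acquired by" the decorated inner function with the time it waited, and "released" with the time it was held (e.g. the "da2e3b05-…" lock around do_reserve above, or the "refresh_cache-…" locks around the neutron cache refresh). A minimal illustrative sketch of that pattern using the standard oslo.concurrency helpers follows; the function names and lock strings are examples lifted from the log, not Nova's actual code.

    # Illustrative sketch only: reproduces the acquire/release tracing style
    # seen in the log via oslo.concurrency. The decorated function below is a
    # stand-in, not the real Nova implementation.
    from oslo_concurrency import lockutils

    INSTANCE_UUID = "1689fc63-3c07-4517-bbef-0011d860e9fc"  # taken from the log above

    @lockutils.synchronized(INSTANCE_UUID)
    def do_terminate_instance():
        # Runs only while the per-instance lock is held; lockutils emits
        # 'acquired by ... waited N.NNNs' on entry and
        # '"released" ... held N.NNNs' on exit (lockutils.py inner wrapper).
        pass

    def refresh_network_cache():
        # Context-manager form, matching the "Acquiring/Acquired/Releasing
        # lock" lines used for the refresh_cache-<uuid> style locks.
        with lockutils.lock("refresh_cache-" + INSTANCE_UUID):
            pass

    if __name__ == "__main__":
        do_terminate_instance()
        refresh_network_cache()
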
[ 1870.148413] env[63371]: DEBUG nova.compute.utils [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1870.284418] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "0c9156ea-81c4-4286-a20b-66068a5bce59" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.284737] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "0c9156ea-81c4-4286-a20b-66068a5bce59" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.284993] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "0c9156ea-81c4-4286-a20b-66068a5bce59-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.285252] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "0c9156ea-81c4-4286-a20b-66068a5bce59-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.285464] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "0c9156ea-81c4-4286-a20b-66068a5bce59-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.287425] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52e7bffc-28e3-c044-0877-2b3171327e79, 'name': SearchDatastore_Task, 'duration_secs': 0.03049} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.287983] env[63371]: INFO nova.compute.manager [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Terminating instance [ 1870.289939] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1870.290274] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] fac8df06-ab04-41ec-a32b-f46a08470a97/fac8df06-ab04-41ec-a32b-f46a08470a97.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1870.290972] env[63371]: DEBUG nova.compute.manager [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1870.291380] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1870.291647] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82ed9b44-e0c4-4725-a1c1-416df2b4137e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.294545] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90546dae-aeb3-4c62-8e26-a7a909a69e5d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.301590] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1870.302710] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-541fe5d8-d53f-4889-8291-3d781f86f1cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.304177] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1870.304177] env[63371]: value = "task-1774963" [ 1870.304177] env[63371]: _type 
= "Task" [ 1870.304177] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.309227] env[63371]: DEBUG oslo_vmware.api [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1870.309227] env[63371]: value = "task-1774964" [ 1870.309227] env[63371]: _type = "Task" [ 1870.309227] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.312226] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774963, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.319549] env[63371]: DEBUG oslo_vmware.api [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774964, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.575289] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.575569] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.575763] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.595906] env[63371]: INFO nova.scheduler.client.report [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Deleted allocations for instance 1689fc63-3c07-4517-bbef-0011d860e9fc [ 1870.652023] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.815359] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: 
{'id': task-1774963, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.823614] env[63371]: DEBUG oslo_vmware.api [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774964, 'name': PowerOffVM_Task, 'duration_secs': 0.361178} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.823891] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1870.824068] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1870.824328] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a1e8e99-f5b5-46f9-9789-41b86403c575 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.959227] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1870.959466] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1870.959647] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleting the datastore file [datastore1] 0c9156ea-81c4-4286-a20b-66068a5bce59 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1870.959929] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e583354c-ded9-4248-95f2-5773af012f10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.967214] env[63371]: DEBUG oslo_vmware.api [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for the task: (returnval){ [ 1870.967214] env[63371]: value = "task-1774966" [ 1870.967214] env[63371]: _type = "Task" [ 1870.967214] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.975789] env[63371]: DEBUG oslo_vmware.api [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774966, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.104590] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f1e0e07c-81bf-4471-8ca2-a25a917da983 tempest-DeleteServersTestJSON-2052807132 tempest-DeleteServersTestJSON-2052807132-project-member] Lock "1689fc63-3c07-4517-bbef-0011d860e9fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.784s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1871.317226] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774963, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.78003} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.317507] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] fac8df06-ab04-41ec-a32b-f46a08470a97/fac8df06-ab04-41ec-a32b-f46a08470a97.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1871.317767] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1871.318081] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-88c65de1-0147-41cc-853e-295db659d60c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.324586] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1871.324586] env[63371]: value = "task-1774967" [ 1871.324586] env[63371]: _type = "Task" [ 1871.324586] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.332744] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774967, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.477409] env[63371]: DEBUG oslo_vmware.api [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774966, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.705152] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.705422] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1871.705659] env[63371]: INFO nova.compute.manager [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Attaching volume 5131a893-86b0-4ed5-aa6d-56a831c0ba5a to /dev/sdb [ 1871.737467] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47dc262-3a43-40b5-ab2d-f8f2252b8cfe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.744920] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7318937f-f045-4d7e-9375-5b5d27c9cd04 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.758233] env[63371]: DEBUG nova.virt.block_device [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Updating existing volume attachment record: 7b55fd5f-6b0f-47c4-b89c-aba3764ca5f5 {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1871.834605] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774967, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.224058} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.834874] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1871.835651] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1554913c-d4fb-463d-8e3b-a29be34d685e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.857017] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] fac8df06-ab04-41ec-a32b-f46a08470a97/fac8df06-ab04-41ec-a32b-f46a08470a97.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1871.857249] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c05e628-4804-4b5f-9dbc-9fa45b67f7c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.876901] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1871.876901] env[63371]: value = "task-1774969" [ 1871.876901] env[63371]: _type = "Task" [ 1871.876901] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1871.884754] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774969, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1871.978257] env[63371]: DEBUG oslo_vmware.api [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Task: {'id': task-1774966, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.597428} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.979050] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1871.979050] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1871.979050] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1871.979296] env[63371]: INFO nova.compute.manager [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Took 1.69 seconds to destroy the instance on the hypervisor. [ 1871.979398] env[63371]: DEBUG oslo.service.loopingcall [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1871.979626] env[63371]: DEBUG nova.compute.manager [-] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1871.979745] env[63371]: DEBUG nova.network.neutron [-] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1872.318726] env[63371]: DEBUG nova.compute.manager [req-58f122da-1a8f-49c7-8f45-2b607c7adb3a req-78ea1233-b4a0-4d59-9923-33aaab15c88d service nova] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Received event network-vif-deleted-0ea63013-3179-492e-89dd-074b2ed530c0 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1872.318964] env[63371]: INFO nova.compute.manager [req-58f122da-1a8f-49c7-8f45-2b607c7adb3a req-78ea1233-b4a0-4d59-9923-33aaab15c88d service nova] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Neutron deleted interface 0ea63013-3179-492e-89dd-074b2ed530c0; detaching it from the instance and deleting it from the info cache [ 1872.319100] env[63371]: DEBUG nova.network.neutron [req-58f122da-1a8f-49c7-8f45-2b607c7adb3a req-78ea1233-b4a0-4d59-9923-33aaab15c88d service nova] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.388026] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774969, 'name': 
ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1872.797554] env[63371]: DEBUG nova.network.neutron [-] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.821760] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3a60c24-c581-4657-a604-b4c68736eaf5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.832703] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b86e638b-5d84-4d17-a8a5-41064da5f4ad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.873216] env[63371]: DEBUG nova.compute.manager [req-58f122da-1a8f-49c7-8f45-2b607c7adb3a req-78ea1233-b4a0-4d59-9923-33aaab15c88d service nova] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Detach interface failed, port_id=0ea63013-3179-492e-89dd-074b2ed530c0, reason: Instance 0c9156ea-81c4-4286-a20b-66068a5bce59 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1872.887713] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774969, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.299872] env[63371]: INFO nova.compute.manager [-] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Took 1.32 seconds to deallocate network for instance. [ 1873.388274] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774969, 'name': ReconfigVM_Task, 'duration_secs': 1.218703} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.388659] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Reconfigured VM instance instance-00000074 to attach disk [datastore1] fac8df06-ab04-41ec-a32b-f46a08470a97/fac8df06-ab04-41ec-a32b-f46a08470a97.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1873.389346] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3aad2c4c-f66b-4c61-a203-27e8b2f0f02c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.396729] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1873.396729] env[63371]: value = "task-1774971" [ 1873.396729] env[63371]: _type = "Task" [ 1873.396729] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.405104] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774971, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.809466] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1873.809753] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.809973] env[63371]: DEBUG nova.objects.instance [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lazy-loading 'resources' on Instance uuid 0c9156ea-81c4-4286-a20b-66068a5bce59 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1873.907035] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774971, 'name': Rename_Task, 'duration_secs': 0.332095} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.907369] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1873.907682] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d6f4a468-bbb4-4660-8120-152236130c9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.914119] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1873.914119] env[63371]: value = "task-1774972" [ 1873.914119] env[63371]: _type = "Task" [ 1873.914119] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.922200] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774972, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.425109] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774972, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.552023] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d528b5c6-fe07-458e-9478-1c7371bff69f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.560455] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e8aa9d-8ec1-4824-91eb-3703c0c41b7c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.594523] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a32caa3-99a9-4af9-b92d-29c753a13740 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.605687] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625036f6-0be0-424b-8d05-a69f56694600 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.620483] env[63371]: DEBUG nova.compute.provider_tree [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1874.925157] env[63371]: DEBUG oslo_vmware.api [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774972, 'name': PowerOnVM_Task, 'duration_secs': 1.007893} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.925424] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1874.925632] env[63371]: INFO nova.compute.manager [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Took 10.94 seconds to spawn the instance on the hypervisor. 
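[Editor's note] The entries above and below show the task-polling pattern behind the repeated "progress is N%" lines: a vSphere task (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, later CloneVM_Task) is started, oslo_vmware's _poll_task logs its progress on each poll, and wait_for_task returns once the task reports completion ("completed successfully" with a duration_secs value). The sketch below is a minimal, generic illustration of that poll-until-done loop in plain Python; it is not Nova's or oslo.vmware's implementation, and get_task_info is a hypothetical stand-in for the vSphere TaskInfo lookup the log shows.

```python
import time


class TaskFailed(Exception):
    """Raised when the remote task finishes in an error state."""


def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=60.0):
    """Poll a remote task until it succeeds, fails, or times out.

    get_task_info(task_id) is a hypothetical callable returning a dict like
    {'state': 'running' | 'success' | 'error', 'progress': int, 'error': str},
    standing in for the vSphere task status queries seen in the log.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info(task_id)
        if info['state'] == 'success':
            # Corresponds to the "... completed successfully" log entries.
            return info
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        if time.monotonic() > deadline:
            raise TimeoutError(f'task {task_id} did not finish in {timeout}s')
        # Mirrors the periodic "Task: {...} progress is N%." lines.
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
```

In the log, this same loop shape is what produces the intermediate 14%, 66%, 94% and 95% progress entries before each task's final "completed successfully" line.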
[ 1874.927749] env[63371]: DEBUG nova.compute.manager [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1874.927749] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0515eae6-93ce-434e-adfe-6121aa387e14 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.102572] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "interface-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-3a849b63-f519-49c8-92b3-ad93796fcc9b" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.102572] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-3a849b63-f519-49c8-92b3-ad93796fcc9b" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.102572] env[63371]: DEBUG nova.objects.instance [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'flavor' on Instance uuid 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1875.150017] env[63371]: ERROR nova.scheduler.client.report [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] [req-b1dd16bd-a4e4-47d1-a899-b931670d58e6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-b1dd16bd-a4e4-47d1-a899-b931670d58e6"}]} [ 1875.164918] env[63371]: DEBUG nova.scheduler.client.report [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1875.188933] env[63371]: DEBUG nova.scheduler.client.report [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1875.189174] env[63371]: DEBUG nova.compute.provider_tree [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1875.201354] env[63371]: DEBUG nova.scheduler.client.report [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1875.228589] env[63371]: DEBUG nova.scheduler.client.report [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1875.400718] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d1c5cc-b527-484e-83be-5035e8803dfd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.408634] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d257c99b-4f7b-4c88-b224-90f5be078d54 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.446365] env[63371]: INFO nova.compute.manager [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Took 15.84 seconds to build instance. [ 1875.448428] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0ca84d-0854-4975-a4e6-9146cab6f91c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.456076] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22af408d-df79-46f3-a9b4-d32cb977eb2b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.469220] env[63371]: DEBUG nova.compute.provider_tree [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1875.750933] env[63371]: DEBUG nova.objects.instance [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'pci_requests' on Instance uuid 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1875.948337] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7bb3db02-6459-4562-b0ba-4ed9813aa033 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "fac8df06-ab04-41ec-a32b-f46a08470a97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.355s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1875.972448] env[63371]: DEBUG nova.scheduler.client.report [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 163, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1876.255346] env[63371]: DEBUG nova.objects.base [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Object Instance<3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce> lazy-loaded attributes: flavor,pci_requests {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1876.255585] env[63371]: DEBUG nova.network.neutron [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] allocate_for_instance() {{(pid=63371) 
allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1876.305841] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Volume attach. Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1876.306124] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368501', 'volume_id': '5131a893-86b0-4ed5-aa6d-56a831c0ba5a', 'name': 'volume-5131a893-86b0-4ed5-aa6d-56a831c0ba5a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'da2e3b05-9cb0-49bb-8945-924e48cf3431', 'attached_at': '', 'detached_at': '', 'volume_id': '5131a893-86b0-4ed5-aa6d-56a831c0ba5a', 'serial': '5131a893-86b0-4ed5-aa6d-56a831c0ba5a'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1876.307077] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfee9fd-4af9-4b96-9026-68622f5b5d38 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.323610] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbc8e3b2-cccf-4cbf-9641-6ac1c96ed52e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.348314] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-5131a893-86b0-4ed5-aa6d-56a831c0ba5a/volume-5131a893-86b0-4ed5-aa6d-56a831c0ba5a.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1876.349929] env[63371]: DEBUG nova.policy [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aacd81490704110b6cc6aba338883a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a5897667b6b47deb7ff5b64f9499f36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1876.351842] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5a3df50-23cd-440b-96de-fc32da26b30a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.370010] env[63371]: DEBUG oslo_vmware.api [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 
tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1876.370010] env[63371]: value = "task-1774974" [ 1876.370010] env[63371]: _type = "Task" [ 1876.370010] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.378315] env[63371]: DEBUG oslo_vmware.api [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774974, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.477602] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.668s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.563682] env[63371]: INFO nova.scheduler.client.report [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Deleted allocations for instance 0c9156ea-81c4-4286-a20b-66068a5bce59 [ 1876.880165] env[63371]: DEBUG oslo_vmware.api [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774974, 'name': ReconfigVM_Task, 'duration_secs': 0.375914} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.880452] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-5131a893-86b0-4ed5-aa6d-56a831c0ba5a/volume-5131a893-86b0-4ed5-aa6d-56a831c0ba5a.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1876.885184] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c2a47a3-cb7c-42c7-a4f9-27a45d277677 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.902443] env[63371]: DEBUG oslo_vmware.api [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1876.902443] env[63371]: value = "task-1774975" [ 1876.902443] env[63371]: _type = "Task" [ 1876.902443] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.911179] env[63371]: DEBUG oslo_vmware.api [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774975, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.071679] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9bfce954-1cde-429a-b5de-a59889b3e71a tempest-ServersTestJSON-1162814863 tempest-ServersTestJSON-1162814863-project-member] Lock "0c9156ea-81c4-4286-a20b-66068a5bce59" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.787s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.412268] env[63371]: DEBUG oslo_vmware.api [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774975, 'name': ReconfigVM_Task, 'duration_secs': 0.137057} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.412624] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368501', 'volume_id': '5131a893-86b0-4ed5-aa6d-56a831c0ba5a', 'name': 'volume-5131a893-86b0-4ed5-aa6d-56a831c0ba5a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'da2e3b05-9cb0-49bb-8945-924e48cf3431', 'attached_at': '', 'detached_at': '', 'volume_id': '5131a893-86b0-4ed5-aa6d-56a831c0ba5a', 'serial': '5131a893-86b0-4ed5-aa6d-56a831c0ba5a'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1877.506307] env[63371]: DEBUG nova.compute.manager [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1877.507284] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f5194f-9504-44be-ace3-0a2b6b6ee6c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.921427] env[63371]: DEBUG nova.compute.manager [req-b0009a22-a68c-466f-8226-aedf1a6968ca req-23babbdd-2010-4811-947c-2d3fe60496ba service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Received event network-vif-plugged-3a849b63-f519-49c8-92b3-ad93796fcc9b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1877.921680] env[63371]: DEBUG oslo_concurrency.lockutils [req-b0009a22-a68c-466f-8226-aedf1a6968ca req-23babbdd-2010-4811-947c-2d3fe60496ba service nova] Acquiring lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.921924] env[63371]: DEBUG oslo_concurrency.lockutils [req-b0009a22-a68c-466f-8226-aedf1a6968ca req-23babbdd-2010-4811-947c-2d3fe60496ba service nova] Lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.922143] env[63371]: DEBUG oslo_concurrency.lockutils [req-b0009a22-a68c-466f-8226-aedf1a6968ca req-23babbdd-2010-4811-947c-2d3fe60496ba service nova] Lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.922317] env[63371]: DEBUG nova.compute.manager [req-b0009a22-a68c-466f-8226-aedf1a6968ca req-23babbdd-2010-4811-947c-2d3fe60496ba service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] No waiting events found dispatching network-vif-plugged-3a849b63-f519-49c8-92b3-ad93796fcc9b {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1877.922483] env[63371]: WARNING nova.compute.manager [req-b0009a22-a68c-466f-8226-aedf1a6968ca req-23babbdd-2010-4811-947c-2d3fe60496ba service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Received unexpected event network-vif-plugged-3a849b63-f519-49c8-92b3-ad93796fcc9b for instance with vm_state active and task_state None. [ 1878.019798] env[63371]: INFO nova.compute.manager [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] instance snapshotting [ 1878.022692] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de4f040-285e-41ca-8e4e-c2d3b53fa8b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.048432] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64bc25f-55fd-46f7-b25b-d022552d3140 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.080701] env[63371]: DEBUG nova.network.neutron [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Successfully updated port: 3a849b63-f519-49c8-92b3-ad93796fcc9b {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1878.464014] env[63371]: DEBUG nova.objects.instance [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'flavor' on Instance uuid da2e3b05-9cb0-49bb-8945-924e48cf3431 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1878.564191] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1878.564565] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-94d8cd2a-3ece-49fa-8742-6f91c1ab3c4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.572828] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 
tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1878.572828] env[63371]: value = "task-1774976" [ 1878.572828] env[63371]: _type = "Task" [ 1878.572828] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.581973] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.582166] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.582581] env[63371]: DEBUG nova.network.neutron [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1878.583532] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774976, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.970186] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b087da19-a0a9-412d-9f78-824a6ecc9c48 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.264s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.084426] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774976, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.121604] env[63371]: WARNING nova.network.neutron [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] 78c77028-c23a-4160-8b08-d336e8101b3b already exists in list: networks containing: ['78c77028-c23a-4160-8b08-d336e8101b3b']. 
ignoring it [ 1879.511876] env[63371]: DEBUG nova.network.neutron [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updating instance_info_cache with network_info: [{"id": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "address": "fa:16:3e:7a:b8:42", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39fe8c75-7a", "ovs_interfaceid": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3a849b63-f519-49c8-92b3-ad93796fcc9b", "address": "fa:16:3e:a9:f6:77", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a849b63-f5", "ovs_interfaceid": "3a849b63-f519-49c8-92b3-ad93796fcc9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.584749] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774976, 'name': CreateSnapshot_Task, 'duration_secs': 0.734746} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.585095] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1879.585805] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7de376-ad61-4e07-9dff-439d6c2db1b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.597862] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1879.598095] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1880.018959] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.018959] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1880.019159] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.020280] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11e04bb-aa3f-4488-8522-67f42e20067d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.038844] env[63371]: DEBUG nova.virt.hardware [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1880.039050] env[63371]: DEBUG nova.virt.hardware [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1880.039276] env[63371]: DEBUG nova.virt.hardware [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1880.039464] env[63371]: DEBUG nova.virt.hardware [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1880.039645] env[63371]: DEBUG nova.virt.hardware [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1880.039818] env[63371]: DEBUG nova.virt.hardware [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1880.040037] env[63371]: DEBUG nova.virt.hardware [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1880.040198] env[63371]: DEBUG nova.virt.hardware [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1880.040362] env[63371]: DEBUG nova.virt.hardware [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1880.040523] env[63371]: DEBUG nova.virt.hardware [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1880.040692] env[63371]: DEBUG nova.virt.hardware [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1880.047111] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Reconfiguring VM to attach interface {{(pid=63371) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1880.047492] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6cb7344d-0788-4ef8-b199-a1dbf4a3bc10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.066649] env[63371]: DEBUG oslo_vmware.api [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1880.066649] env[63371]: value = "task-1774977" [ 1880.066649] env[63371]: _type = "Task" [ 1880.066649] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.075883] env[63371]: DEBUG oslo_vmware.api [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774977, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.105171] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1880.106112] env[63371]: DEBUG nova.compute.utils [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1880.107540] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e80785bc-8a04-4d51-ae23-c3746659e449 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.115536] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1880.115536] env[63371]: value = "task-1774978" [ 1880.115536] env[63371]: _type = "Task" [ 1880.115536] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.124336] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774978, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.578770] env[63371]: DEBUG oslo_vmware.api [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774977, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.612879] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.014s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.627425] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774978, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.081727] env[63371]: DEBUG oslo_vmware.api [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774977, 'name': ReconfigVM_Task, 'duration_secs': 0.657142} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.082465] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.082787] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Reconfigured VM to attach interface {{(pid=63371) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1881.131466] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774978, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.336542] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.336782] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.588896] env[63371]: DEBUG oslo_concurrency.lockutils [None req-1b9818e3-3db1-41ab-9e2e-40639cdcf90d tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-3a849b63-f519-49c8-92b3-ad93796fcc9b" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.488s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.627714] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774978, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.735890] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1881.736191] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.736437] env[63371]: INFO nova.compute.manager [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Attaching volume 5f60ff6a-450b-4518-8917-11df550c2ac1 to /dev/sdb [ 1881.773222] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bca5e4f-4bbb-4b27-bf5b-0a84d06c818c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.782052] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ab91e5-a4b1-4814-bfc8-5771bd7283ee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.795257] env[63371]: DEBUG nova.virt.block_device [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updating existing volume attachment record: ddfe0f36-afb3-467d-a525-8b15fcbf3e4b {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1881.839695] env[63371]: DEBUG nova.compute.utils [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1882.128627] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774978, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.263612] env[63371]: DEBUG nova.compute.manager [req-8a89dec8-526d-4bac-ae43-5ecc891dfbdb req-c0f20785-fbcf-4d5b-975d-0001907cbcfc service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Received event network-changed-3a849b63-f519-49c8-92b3-ad93796fcc9b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1882.263720] env[63371]: DEBUG nova.compute.manager [req-8a89dec8-526d-4bac-ae43-5ecc891dfbdb req-c0f20785-fbcf-4d5b-975d-0001907cbcfc service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Refreshing instance network info cache due to event network-changed-3a849b63-f519-49c8-92b3-ad93796fcc9b. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1882.264024] env[63371]: DEBUG oslo_concurrency.lockutils [req-8a89dec8-526d-4bac-ae43-5ecc891dfbdb req-c0f20785-fbcf-4d5b-975d-0001907cbcfc service nova] Acquiring lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.264249] env[63371]: DEBUG oslo_concurrency.lockutils [req-8a89dec8-526d-4bac-ae43-5ecc891dfbdb req-c0f20785-fbcf-4d5b-975d-0001907cbcfc service nova] Acquired lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.264421] env[63371]: DEBUG nova.network.neutron [req-8a89dec8-526d-4bac-ae43-5ecc891dfbdb req-c0f20785-fbcf-4d5b-975d-0001907cbcfc service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Refreshing network info cache for port 3a849b63-f519-49c8-92b3-ad93796fcc9b {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1882.342187] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1882.629246] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774978, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.975097] env[63371]: DEBUG nova.network.neutron [req-8a89dec8-526d-4bac-ae43-5ecc891dfbdb req-c0f20785-fbcf-4d5b-975d-0001907cbcfc service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updated VIF entry in instance network info cache for port 3a849b63-f519-49c8-92b3-ad93796fcc9b. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1882.975537] env[63371]: DEBUG nova.network.neutron [req-8a89dec8-526d-4bac-ae43-5ecc891dfbdb req-c0f20785-fbcf-4d5b-975d-0001907cbcfc service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updating instance_info_cache with network_info: [{"id": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "address": "fa:16:3e:7a:b8:42", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39fe8c75-7a", "ovs_interfaceid": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3a849b63-f519-49c8-92b3-ad93796fcc9b", "address": "fa:16:3e:a9:f6:77", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a849b63-f5", "ovs_interfaceid": "3a849b63-f519-49c8-92b3-ad93796fcc9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.988047] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "interface-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-3a849b63-f519-49c8-92b3-ad93796fcc9b" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1882.988325] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 
tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-3a849b63-f519-49c8-92b3-ad93796fcc9b" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.130660] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774978, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.478177] env[63371]: DEBUG oslo_concurrency.lockutils [req-8a89dec8-526d-4bac-ae43-5ecc891dfbdb req-c0f20785-fbcf-4d5b-975d-0001907cbcfc service nova] Releasing lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.491024] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.491230] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.492291] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3805dbef-dc72-44e7-9b9c-243947e2b814 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.511266] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80e1d71-6619-44c1-bf4c-7be11dd272ad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.530874] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.531123] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.531350] env[63371]: INFO nova.compute.manager [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Attaching volume bb7e9453-2717-44f0-aa95-77eb1f11cafe to /dev/sdc [ 
1883.538181] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Reconfiguring VM to detach interface {{(pid=63371) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1883.538967] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c0ef89a-b19f-435e-8e4f-15397602de8e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.558917] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1883.558917] env[63371]: value = "task-1774982" [ 1883.558917] env[63371]: _type = "Task" [ 1883.558917] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.566748] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774982, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.578089] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32f3775-337f-4835-bd29-a84d9153437c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.585357] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073245d3-2c2f-43c2-a9f8-9eb0728f5743 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.598128] env[63371]: DEBUG nova.virt.block_device [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Updating existing volume attachment record: 1924344a-b79a-4172-a9c4-96555090c02d {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1883.628555] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774978, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.068598] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774982, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.131817] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774978, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.569230] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774982, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.630063] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774978, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.069687] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774982, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.130780] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774978, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.570552] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774982, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.631999] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774978, 'name': CloneVM_Task} progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.070597] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774982, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.134756] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774978, 'name': CloneVM_Task, 'duration_secs': 5.945661} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.135063] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Created linked-clone VM from snapshot [ 1886.135775] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eec8981-f8c6-4083-8575-9c4c036e2210 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.142463] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Uploading image c09251a6-35a5-44f4-a2f4-bc980e1b4f59 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1886.169804] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1886.170081] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-fd0ca067-3520-4215-81ef-73ed3549a1d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.176499] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1886.176499] env[63371]: value = "task-1774986" [ 1886.176499] env[63371]: _type = "Task" [ 1886.176499] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.183629] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774986, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.344517] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Volume attach. 
Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1886.344517] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368505', 'volume_id': '5f60ff6a-450b-4518-8917-11df550c2ac1', 'name': 'volume-5f60ff6a-450b-4518-8917-11df550c2ac1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ff724a9f-5e9a-4683-8eb3-058fb3639ea5', 'attached_at': '', 'detached_at': '', 'volume_id': '5f60ff6a-450b-4518-8917-11df550c2ac1', 'serial': '5f60ff6a-450b-4518-8917-11df550c2ac1'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1886.345022] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fc71132-1d18-4a60-874b-bfa326043a4f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.361065] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc96c0a-be0f-4804-b83d-4b226cd286b8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.387131] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] volume-5f60ff6a-450b-4518-8917-11df550c2ac1/volume-5f60ff6a-450b-4518-8917-11df550c2ac1.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1886.387474] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f813dc41-c09b-4dc3-bb62-2121398b77f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.405948] env[63371]: DEBUG oslo_vmware.api [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1886.405948] env[63371]: value = "task-1774987" [ 1886.405948] env[63371]: _type = "Task" [ 1886.405948] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.413691] env[63371]: DEBUG oslo_vmware.api [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774987, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.572260] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774982, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.686514] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774986, 'name': Destroy_Task, 'duration_secs': 0.306505} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.686873] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Destroyed the VM [ 1886.687052] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1886.687312] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-527e9daf-aeae-4e84-90b7-ff8d3458fdfe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.694809] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1886.694809] env[63371]: value = "task-1774988" [ 1886.694809] env[63371]: _type = "Task" [ 1886.694809] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.702550] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774988, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.915401] env[63371]: DEBUG oslo_vmware.api [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774987, 'name': ReconfigVM_Task, 'duration_secs': 0.336043} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.915629] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Reconfigured VM instance instance-0000006f to attach disk [datastore1] volume-5f60ff6a-450b-4518-8917-11df550c2ac1/volume-5f60ff6a-450b-4518-8917-11df550c2ac1.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1886.920271] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42af71cc-915e-4d75-8ea8-d20347f95ae6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.934229] env[63371]: DEBUG oslo_vmware.api [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1886.934229] env[63371]: value = "task-1774989" [ 1886.934229] env[63371]: _type = "Task" [ 1886.934229] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.942363] env[63371]: DEBUG oslo_vmware.api [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774989, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.072546] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774982, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.204618] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774988, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.445826] env[63371]: DEBUG oslo_vmware.api [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774989, 'name': ReconfigVM_Task, 'duration_secs': 0.128011} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.446156] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368505', 'volume_id': '5f60ff6a-450b-4518-8917-11df550c2ac1', 'name': 'volume-5f60ff6a-450b-4518-8917-11df550c2ac1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ff724a9f-5e9a-4683-8eb3-058fb3639ea5', 'attached_at': '', 'detached_at': '', 'volume_id': '5f60ff6a-450b-4518-8917-11df550c2ac1', 'serial': '5f60ff6a-450b-4518-8917-11df550c2ac1'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1887.575406] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774982, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.706080] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774988, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.911794] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1887.912048] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1888.074630] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774982, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.140974] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Volume attach. 
Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1888.140974] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368506', 'volume_id': 'bb7e9453-2717-44f0-aa95-77eb1f11cafe', 'name': 'volume-bb7e9453-2717-44f0-aa95-77eb1f11cafe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'da2e3b05-9cb0-49bb-8945-924e48cf3431', 'attached_at': '', 'detached_at': '', 'volume_id': 'bb7e9453-2717-44f0-aa95-77eb1f11cafe', 'serial': 'bb7e9453-2717-44f0-aa95-77eb1f11cafe'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1888.141842] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60dedb86-e709-4966-a0a4-1cec26907cd1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.157730] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f24f79-f88b-49d7-89c7-b4e528fd8744 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.184362] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-bb7e9453-2717-44f0-aa95-77eb1f11cafe/volume-bb7e9453-2717-44f0-aa95-77eb1f11cafe.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1888.184606] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38408414-c205-47cd-ab3d-b85e62e6825e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.204670] env[63371]: DEBUG oslo_vmware.api [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774988, 'name': RemoveSnapshot_Task, 'duration_secs': 1.369332} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.205734] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1888.208112] env[63371]: DEBUG oslo_vmware.api [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1888.208112] env[63371]: value = "task-1774990" [ 1888.208112] env[63371]: _type = "Task" [ 1888.208112] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.216735] env[63371]: DEBUG oslo_vmware.api [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774990, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.416106] env[63371]: DEBUG nova.compute.utils [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1888.491981] env[63371]: DEBUG nova.objects.instance [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lazy-loading 'flavor' on Instance uuid ff724a9f-5e9a-4683-8eb3-058fb3639ea5 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1888.574527] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774982, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.710015] env[63371]: WARNING nova.compute.manager [None req-d3efae06-8296-4b4c-912a-fb33470d4a15 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Image not found during snapshot: nova.exception.ImageNotFound: Image c09251a6-35a5-44f4-a2f4-bc980e1b4f59 could not be found. [ 1888.720194] env[63371]: DEBUG oslo_vmware.api [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774990, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.918944] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.000449] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4ae8a61f-d93f-49c2-83d6-2f89a7469c1e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.264s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.077037] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774982, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.221022] env[63371]: DEBUG oslo_vmware.api [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774990, 'name': ReconfigVM_Task, 'duration_secs': 0.527556} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.222339] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-bb7e9453-2717-44f0-aa95-77eb1f11cafe/volume-bb7e9453-2717-44f0-aa95-77eb1f11cafe.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1889.226179] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78fa537f-3ffe-4161-aec3-2746b7f28bd5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.240799] env[63371]: DEBUG oslo_vmware.api [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1889.240799] env[63371]: value = "task-1774991" [ 1889.240799] env[63371]: _type = "Task" [ 1889.240799] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.248541] env[63371]: DEBUG oslo_vmware.api [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774991, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.283766] env[63371]: INFO nova.compute.manager [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Rescuing [ 1889.284059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1889.284224] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1889.284387] env[63371]: DEBUG nova.network.neutron [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1889.574693] env[63371]: DEBUG oslo_vmware.api [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1774982, 'name': ReconfigVM_Task, 'duration_secs': 5.779421} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.574975] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1889.575198] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Reconfigured VM to detach interface {{(pid=63371) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1889.610745] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "fac8df06-ab04-41ec-a32b-f46a08470a97" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.610980] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "fac8df06-ab04-41ec-a32b-f46a08470a97" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.611207] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "fac8df06-ab04-41ec-a32b-f46a08470a97-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.611390] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "fac8df06-ab04-41ec-a32b-f46a08470a97-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.611573] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "fac8df06-ab04-41ec-a32b-f46a08470a97-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.613597] env[63371]: INFO nova.compute.manager [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Terminating instance [ 1889.615945] env[63371]: 
DEBUG nova.compute.manager [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1889.616159] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1889.616951] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa72d76f-92ad-43fd-ae3c-0cb208616a9b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.625319] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1889.626999] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf1e6c4f-8459-458d-8240-9369087061eb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.633267] env[63371]: DEBUG oslo_vmware.api [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1889.633267] env[63371]: value = "task-1774992" [ 1889.633267] env[63371]: _type = "Task" [ 1889.633267] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.640449] env[63371]: DEBUG oslo_vmware.api [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774992, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.751166] env[63371]: DEBUG oslo_vmware.api [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774991, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.980121] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.980381] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.980608] env[63371]: INFO nova.compute.manager [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Attaching volume 0ae4dd79-9572-4361-935b-a03dac924bed to /dev/sdb [ 1890.013015] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42acaa90-67be-4360-bf3c-d632f7b06a52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.016066] env[63371]: DEBUG nova.network.neutron [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updating instance_info_cache with network_info: [{"id": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "address": "fa:16:3e:38:f8:9a", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaee0e3e-86", "ovs_interfaceid": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1890.021725] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0f9a947-ab1a-4c09-b592-297390668b0d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.034889] env[63371]: 
DEBUG nova.virt.block_device [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating existing volume attachment record: 80ae08fd-ee95-42e9-9a40-9cfb50a574b9 {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1890.142361] env[63371]: DEBUG oslo_vmware.api [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774992, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.250896] env[63371]: DEBUG oslo_vmware.api [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774991, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.520125] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1890.643458] env[63371]: DEBUG oslo_vmware.api [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774992, 'name': PowerOffVM_Task, 'duration_secs': 0.923385} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.643761] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1890.643938] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1890.644204] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82452254-6f7b-44d4-8ed2-45423f0223cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.719388] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1890.719605] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1890.719827] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Deleting the datastore file [datastore1] fac8df06-ab04-41ec-a32b-f46a08470a97 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1890.720113] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-626ff3aa-917f-4dd9-b9d0-3e9ef895a124 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.726275] env[63371]: DEBUG oslo_vmware.api [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1890.726275] env[63371]: value = "task-1774997" [ 1890.726275] env[63371]: _type = "Task" [ 1890.726275] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.734156] env[63371]: DEBUG oslo_vmware.api [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774997, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.749926] env[63371]: DEBUG oslo_vmware.api [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1774991, 'name': ReconfigVM_Task, 'duration_secs': 1.153974} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.750450] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368506', 'volume_id': 'bb7e9453-2717-44f0-aa95-77eb1f11cafe', 'name': 'volume-bb7e9453-2717-44f0-aa95-77eb1f11cafe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'da2e3b05-9cb0-49bb-8945-924e48cf3431', 'attached_at': '', 'detached_at': '', 'volume_id': 'bb7e9453-2717-44f0-aa95-77eb1f11cafe', 'serial': 'bb7e9453-2717-44f0-aa95-77eb1f11cafe'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1890.856019] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1890.856276] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1890.856461] env[63371]: DEBUG nova.network.neutron [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1891.052865] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1891.053162] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f03ec28-5957-42c0-be83-a51ee4ca29ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.059672] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1891.059672] env[63371]: value = "task-1774998" [ 1891.059672] env[63371]: _type = "Task" [ 1891.059672] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.070200] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774998, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.235632] env[63371]: DEBUG oslo_vmware.api [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1774997, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15978} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.236060] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1891.236319] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1891.236558] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1891.236768] env[63371]: INFO nova.compute.manager [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1891.237090] env[63371]: DEBUG oslo.service.loopingcall [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1891.237763] env[63371]: DEBUG nova.compute.manager [-] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1891.237763] env[63371]: DEBUG nova.network.neutron [-] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1891.486686] env[63371]: DEBUG nova.compute.manager [req-1ca1a060-d4ac-4857-abfc-a4a683662cbe req-621c3e73-a8a6-425a-9e55-8e85c4e00da8 service nova] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Received event network-vif-deleted-28f8575e-6c81-45a8-9471-fcd0e5f8ae9a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1891.486839] env[63371]: INFO nova.compute.manager [req-1ca1a060-d4ac-4857-abfc-a4a683662cbe req-621c3e73-a8a6-425a-9e55-8e85c4e00da8 service nova] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Neutron deleted interface 28f8575e-6c81-45a8-9471-fcd0e5f8ae9a; detaching it from the instance and deleting it from the info cache [ 1891.487025] env[63371]: DEBUG nova.network.neutron [req-1ca1a060-d4ac-4857-abfc-a4a683662cbe req-621c3e73-a8a6-425a-9e55-8e85c4e00da8 service nova] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.569445] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774998, 'name': PowerOffVM_Task, 'duration_secs': 0.190262} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.569721] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1891.573361] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a127a56-7780-4704-9e28-85ea59875b8a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.600709] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d691b61-9b94-4575-a8db-3e6e40a5126d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.628472] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1891.629084] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3fe768fa-6658-4d7a-a263-4cd38229fc76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.635466] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1891.635466] env[63371]: value = "task-1774999" [ 1891.635466] env[63371]: _type = "Task" [ 1891.635466] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.643347] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1774999, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.772590] env[63371]: INFO nova.network.neutron [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Port 3a849b63-f519-49c8-92b3-ad93796fcc9b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1891.772971] env[63371]: DEBUG nova.network.neutron [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updating instance_info_cache with network_info: [{"id": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "address": "fa:16:3e:7a:b8:42", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39fe8c75-7a", "ovs_interfaceid": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.791474] env[63371]: DEBUG nova.objects.instance [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'flavor' on Instance uuid da2e3b05-9cb0-49bb-8945-924e48cf3431 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1891.956879] env[63371]: DEBUG nova.network.neutron [-] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1891.977980] env[63371]: DEBUG nova.compute.manager [req-f3dbc358-51bc-4c6f-8c35-ba0d2aa302ae req-4fc0af45-9def-4593-97fa-8444237d6e49 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Received event network-changed-39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1891.978220] env[63371]: DEBUG nova.compute.manager [req-f3dbc358-51bc-4c6f-8c35-ba0d2aa302ae req-4fc0af45-9def-4593-97fa-8444237d6e49 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Refreshing instance network info cache due to event network-changed-39fe8c75-7aaa-42da-a231-9c68310ef7c8. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1891.978352] env[63371]: DEBUG oslo_concurrency.lockutils [req-f3dbc358-51bc-4c6f-8c35-ba0d2aa302ae req-4fc0af45-9def-4593-97fa-8444237d6e49 service nova] Acquiring lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1891.990110] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-06275eec-c384-49b6-a170-d91d5d3902ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.000731] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00eacffb-f1d5-402e-ac51-227dc8a5dd4c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.032474] env[63371]: DEBUG nova.compute.manager [req-1ca1a060-d4ac-4857-abfc-a4a683662cbe req-621c3e73-a8a6-425a-9e55-8e85c4e00da8 service nova] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Detach interface failed, port_id=28f8575e-6c81-45a8-9471-fcd0e5f8ae9a, reason: Instance fac8df06-ab04-41ec-a32b-f46a08470a97 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1892.146748] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1892.146971] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1892.147255] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1892.147462] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.147715] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1892.147973] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-2e6d2800-63b9-426d-b422-7f13fa1e88fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.157018] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1892.158026] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1892.158026] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6366372-af64-4cbe-86cf-d75a6fb9828f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.166061] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1892.166061] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5214bba8-b5a0-d067-f126-a8f9b427548b" [ 1892.166061] env[63371]: _type = "Task" [ 1892.166061] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.174056] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5214bba8-b5a0-d067-f126-a8f9b427548b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.276052] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1892.278147] env[63371]: DEBUG oslo_concurrency.lockutils [req-f3dbc358-51bc-4c6f-8c35-ba0d2aa302ae req-4fc0af45-9def-4593-97fa-8444237d6e49 service nova] Acquired lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1892.278340] env[63371]: DEBUG nova.network.neutron [req-f3dbc358-51bc-4c6f-8c35-ba0d2aa302ae req-4fc0af45-9def-4593-97fa-8444237d6e49 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Refreshing network info cache for port 39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1892.297030] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bae9f50-07b8-472c-a129-0144dba6c8e1 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.766s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.414149] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.414428] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.459715] env[63371]: INFO nova.compute.manager [-] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Took 1.22 seconds to deallocate network for instance. 
[ 1892.664918] env[63371]: DEBUG oslo_concurrency.lockutils [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "interface-e0fa0976-9a73-4b8b-b011-2e15199be5ff-3a849b63-f519-49c8-92b3-ad93796fcc9b" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.665258] env[63371]: DEBUG oslo_concurrency.lockutils [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-e0fa0976-9a73-4b8b-b011-2e15199be5ff-3a849b63-f519-49c8-92b3-ad93796fcc9b" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.665657] env[63371]: DEBUG nova.objects.instance [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'flavor' on Instance uuid e0fa0976-9a73-4b8b-b011-2e15199be5ff {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1892.678573] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5214bba8-b5a0-d067-f126-a8f9b427548b, 'name': SearchDatastore_Task, 'duration_secs': 0.011214} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.679320] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3f5b68c-3e55-4a68-a25c-bca822ab2996 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.685262] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1892.685262] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52300549-bea0-3578-0fe6-2770a5d5ec1e" [ 1892.685262] env[63371]: _type = "Task" [ 1892.685262] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.693385] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52300549-bea0-3578-0fe6-2770a5d5ec1e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.780548] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef41404-b108-41e0-bf49-3a5a7c8e86c0 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-3a849b63-f519-49c8-92b3-ad93796fcc9b" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.792s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.917212] env[63371]: INFO nova.compute.manager [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Detaching volume 5131a893-86b0-4ed5-aa6d-56a831c0ba5a [ 1892.951686] env[63371]: INFO nova.virt.block_device [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Attempting to driver detach volume 5131a893-86b0-4ed5-aa6d-56a831c0ba5a from mountpoint /dev/sdb [ 1892.951988] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Volume detach. Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1892.952207] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368501', 'volume_id': '5131a893-86b0-4ed5-aa6d-56a831c0ba5a', 'name': 'volume-5131a893-86b0-4ed5-aa6d-56a831c0ba5a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'da2e3b05-9cb0-49bb-8945-924e48cf3431', 'attached_at': '', 'detached_at': '', 'volume_id': '5131a893-86b0-4ed5-aa6d-56a831c0ba5a', 'serial': '5131a893-86b0-4ed5-aa6d-56a831c0ba5a'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1892.953098] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c6df266-e20d-4abb-986b-43570e2d4222 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.977140] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.977423] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.977607] env[63371]: DEBUG nova.objects.instance [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lazy-loading 'resources' on Instance uuid fac8df06-ab04-41ec-a32b-f46a08470a97 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1892.979229] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb4d9e9-2773-44cd-a9b6-4428de41907e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.986793] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a119e7-5a93-4b9f-ad51-c8da9df4ffe4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.990604] env[63371]: DEBUG nova.network.neutron [req-f3dbc358-51bc-4c6f-8c35-ba0d2aa302ae req-4fc0af45-9def-4593-97fa-8444237d6e49 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updated VIF entry in instance network info cache for port 39fe8c75-7aaa-42da-a231-9c68310ef7c8. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1892.990933] env[63371]: DEBUG nova.network.neutron [req-f3dbc358-51bc-4c6f-8c35-ba0d2aa302ae req-4fc0af45-9def-4593-97fa-8444237d6e49 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updating instance_info_cache with network_info: [{"id": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "address": "fa:16:3e:7a:b8:42", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap39fe8c75-7a", "ovs_interfaceid": "39fe8c75-7aaa-42da-a231-9c68310ef7c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.014281] env[63371]: DEBUG oslo_concurrency.lockutils [req-f3dbc358-51bc-4c6f-8c35-ba0d2aa302ae req-4fc0af45-9def-4593-97fa-8444237d6e49 service nova] Releasing lock "refresh_cache-3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.015260] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1e82a1-09b7-4dd7-ab81-c1938994a785 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.030648] env[63371]: DEBUG 
nova.virt.vmwareapi.volumeops [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] The volume has not been displaced from its original location: [datastore1] volume-5131a893-86b0-4ed5-aa6d-56a831c0ba5a/volume-5131a893-86b0-4ed5-aa6d-56a831c0ba5a.vmdk. No consolidation needed. {{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1893.035941] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Reconfiguring VM instance instance-0000006b to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1893.036446] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-739f3a29-8ed2-4715-8e82-a5f3dd5d96dd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.054756] env[63371]: DEBUG oslo_vmware.api [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1893.054756] env[63371]: value = "task-1775001" [ 1893.054756] env[63371]: _type = "Task" [ 1893.054756] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.066013] env[63371]: DEBUG oslo_vmware.api [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1775001, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.195878] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52300549-bea0-3578-0fe6-2770a5d5ec1e, 'name': SearchDatastore_Task, 'duration_secs': 0.01019} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.196195] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.196497] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] ff724a9f-5e9a-4683-8eb3-058fb3639ea5/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. 
{{(pid=63371) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1893.196786] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e29a67c4-af7d-415c-8dce-cabc67a68357 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.203830] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1893.203830] env[63371]: value = "task-1775002" [ 1893.203830] env[63371]: _type = "Task" [ 1893.203830] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.214275] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775002, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.298876] env[63371]: DEBUG nova.objects.instance [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'pci_requests' on Instance uuid e0fa0976-9a73-4b8b-b011-2e15199be5ff {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1893.567330] env[63371]: DEBUG oslo_vmware.api [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1775001, 'name': ReconfigVM_Task, 'duration_secs': 0.250135} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.568011] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Reconfigured VM instance instance-0000006b to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1893.576874] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a1efffe-9713-4672-8d5b-fe840822eb2c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.593874] env[63371]: DEBUG oslo_vmware.api [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1893.593874] env[63371]: value = "task-1775003" [ 1893.593874] env[63371]: _type = "Task" [ 1893.593874] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.604235] env[63371]: DEBUG oslo_vmware.api [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1775003, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.689405] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48e5b52-fd66-4b86-bda4-82c8b9387855 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.698137] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18c88f5-9e00-4495-9b4a-03a91cdbd54a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.732984] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d719b13a-5e52-4edc-95f2-2ccc9a055123 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.740475] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775002, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.519646} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.742401] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] ff724a9f-5e9a-4683-8eb3-058fb3639ea5/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk. 
[ 1893.743182] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-292f4feb-9a54-446b-babb-0b4329220bd8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.746351] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cffbf4-2d6a-4ddc-aa88-797e8e1d513b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.775844] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] ff724a9f-5e9a-4683-8eb3-058fb3639ea5/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1893.783755] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-78bc76a1-4144-41cc-af84-25872fa2a6b7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.796266] env[63371]: DEBUG nova.compute.provider_tree [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1893.801681] env[63371]: DEBUG nova.objects.base [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1893.801883] env[63371]: DEBUG nova.network.neutron [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1893.804357] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1893.804357] env[63371]: value = "task-1775004" [ 1893.804357] env[63371]: _type = "Task" [ 1893.804357] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.812830] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775004, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.865661] env[63371]: DEBUG nova.policy [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3aacd81490704110b6cc6aba338883a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5a5897667b6b47deb7ff5b64f9499f36', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1894.003365] env[63371]: DEBUG nova.compute.manager [req-f4425a27-c9a0-49f6-bbcf-ee6e72e38569 req-56624546-4477-441c-98a0-206ad2ba4683 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Received event network-changed-2c2ab976-7609-4012-a826-68288c4f7f64 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1894.003666] env[63371]: DEBUG nova.compute.manager [req-f4425a27-c9a0-49f6-bbcf-ee6e72e38569 req-56624546-4477-441c-98a0-206ad2ba4683 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Refreshing instance network info cache due to event network-changed-2c2ab976-7609-4012-a826-68288c4f7f64. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1894.003813] env[63371]: DEBUG oslo_concurrency.lockutils [req-f4425a27-c9a0-49f6-bbcf-ee6e72e38569 req-56624546-4477-441c-98a0-206ad2ba4683 service nova] Acquiring lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1894.006207] env[63371]: DEBUG oslo_concurrency.lockutils [req-f4425a27-c9a0-49f6-bbcf-ee6e72e38569 req-56624546-4477-441c-98a0-206ad2ba4683 service nova] Acquired lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1894.006207] env[63371]: DEBUG nova.network.neutron [req-f4425a27-c9a0-49f6-bbcf-ee6e72e38569 req-56624546-4477-441c-98a0-206ad2ba4683 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Refreshing network info cache for port 2c2ab976-7609-4012-a826-68288c4f7f64 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1894.103505] env[63371]: DEBUG oslo_vmware.api [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1775003, 'name': ReconfigVM_Task, 'duration_secs': 0.15698} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.104054] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368501', 'volume_id': '5131a893-86b0-4ed5-aa6d-56a831c0ba5a', 'name': 'volume-5131a893-86b0-4ed5-aa6d-56a831c0ba5a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'da2e3b05-9cb0-49bb-8945-924e48cf3431', 'attached_at': '', 'detached_at': '', 'volume_id': '5131a893-86b0-4ed5-aa6d-56a831c0ba5a', 'serial': '5131a893-86b0-4ed5-aa6d-56a831c0ba5a'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1894.315035] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775004, 'name': ReconfigVM_Task, 'duration_secs': 0.379493} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.315035] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Reconfigured VM instance instance-0000006f to attach disk [datastore1] ff724a9f-5e9a-4683-8eb3-058fb3639ea5/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9-rescue.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1894.315789] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1bd13a0-ed48-47fd-ad54-78a53e51e56c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.343299] env[63371]: DEBUG nova.scheduler.client.report [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 167 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1894.343565] env[63371]: DEBUG nova.compute.provider_tree [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 167 to 168 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1894.343769] env[63371]: DEBUG nova.compute.provider_tree [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 
tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1894.347078] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f64ff846-1255-4cce-b14d-4cf1a3289f02 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.357892] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.380s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.365497] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1894.365497] env[63371]: value = "task-1775005" [ 1894.365497] env[63371]: _type = "Task" [ 1894.365497] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.376819] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775005, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.377677] env[63371]: INFO nova.scheduler.client.report [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Deleted allocations for instance fac8df06-ab04-41ec-a32b-f46a08470a97 [ 1894.648356] env[63371]: DEBUG nova.objects.instance [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'flavor' on Instance uuid da2e3b05-9cb0-49bb-8945-924e48cf3431 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1894.735809] env[63371]: DEBUG nova.network.neutron [req-f4425a27-c9a0-49f6-bbcf-ee6e72e38569 req-56624546-4477-441c-98a0-206ad2ba4683 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updated VIF entry in instance network info cache for port 2c2ab976-7609-4012-a826-68288c4f7f64. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1894.736321] env[63371]: DEBUG nova.network.neutron [req-f4425a27-c9a0-49f6-bbcf-ee6e72e38569 req-56624546-4477-441c-98a0-206ad2ba4683 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updating instance_info_cache with network_info: [{"id": "2c2ab976-7609-4012-a826-68288c4f7f64", "address": "fa:16:3e:c1:5a:c9", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c2ab976-76", "ovs_interfaceid": "2c2ab976-7609-4012-a826-68288c4f7f64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.875247] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775005, 'name': ReconfigVM_Task, 'duration_secs': 0.220277} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.875485] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1894.875746] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bbc23702-0988-469e-8201-5580102faf64 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.884962] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1894.884962] env[63371]: value = "task-1775006" [ 1894.884962] env[63371]: _type = "Task" [ 1894.884962] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.885702] env[63371]: DEBUG oslo_concurrency.lockutils [None req-16fc7b0a-d9c1-4677-8a6b-ec306d82e834 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "fac8df06-ab04-41ec-a32b-f46a08470a97" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.274s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1894.894036] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775006, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.078771] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Volume attach. Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1895.079146] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368508', 'volume_id': '0ae4dd79-9572-4361-935b-a03dac924bed', 'name': 'volume-0ae4dd79-9572-4361-935b-a03dac924bed', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '05f6f94a-c9c4-4737-8b07-77e9c2093497', 'attached_at': '', 'detached_at': '', 'volume_id': '0ae4dd79-9572-4361-935b-a03dac924bed', 'serial': '0ae4dd79-9572-4361-935b-a03dac924bed'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1895.080236] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9034c5b-827e-4fa1-9698-d0387cae5d54 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.097882] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f277bdcd-12e0-41df-a4da-60ef797d803e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.122268] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] volume-0ae4dd79-9572-4361-935b-a03dac924bed/volume-0ae4dd79-9572-4361-935b-a03dac924bed.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1895.122595] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f35281c5-2847-420c-8d2f-80cef13180fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.140939] 
env[63371]: DEBUG oslo_vmware.api [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1895.140939] env[63371]: value = "task-1775007" [ 1895.140939] env[63371]: _type = "Task" [ 1895.140939] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.149365] env[63371]: DEBUG oslo_vmware.api [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775007, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.238935] env[63371]: DEBUG oslo_concurrency.lockutils [req-f4425a27-c9a0-49f6-bbcf-ee6e72e38569 req-56624546-4477-441c-98a0-206ad2ba4683 service nova] Releasing lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.399598] env[63371]: DEBUG oslo_vmware.api [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775006, 'name': PowerOnVM_Task, 'duration_secs': 0.471117} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.399933] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1895.402690] env[63371]: DEBUG nova.compute.manager [None req-f20fbb92-56fd-4f6a-b491-aedbbd76f997 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1895.403578] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01afc611-ed6f-4037-9fa7-d497db240925 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.586591] env[63371]: DEBUG nova.network.neutron [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Successfully updated port: 3a849b63-f519-49c8-92b3-ad93796fcc9b {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1895.654028] env[63371]: DEBUG oslo_vmware.api [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775007, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.657081] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f5f1ae9b-4452-481f-bc3a-70cc2eebe343 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.243s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.679069] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "37c33e03-30c7-4cf4-99a1-360d892dde2d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.679307] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "37c33e03-30c7-4cf4-99a1-360d892dde2d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.737425] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.737749] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.031981] env[63371]: DEBUG nova.compute.manager [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Received event network-vif-plugged-3a849b63-f519-49c8-92b3-ad93796fcc9b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1896.032193] env[63371]: DEBUG oslo_concurrency.lockutils [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] Acquiring lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.032401] env[63371]: DEBUG oslo_concurrency.lockutils [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] Lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.032569] env[63371]: DEBUG oslo_concurrency.lockutils [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] Lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.032741] env[63371]: DEBUG nova.compute.manager [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] No waiting events found dispatching network-vif-plugged-3a849b63-f519-49c8-92b3-ad93796fcc9b {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1896.032904] env[63371]: WARNING nova.compute.manager [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Received unexpected event network-vif-plugged-3a849b63-f519-49c8-92b3-ad93796fcc9b for instance with vm_state active and task_state None. [ 1896.033088] env[63371]: DEBUG nova.compute.manager [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Received event network-changed-3a849b63-f519-49c8-92b3-ad93796fcc9b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1896.033260] env[63371]: DEBUG nova.compute.manager [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Refreshing instance network info cache due to event network-changed-3a849b63-f519-49c8-92b3-ad93796fcc9b. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1896.033441] env[63371]: DEBUG oslo_concurrency.lockutils [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] Acquiring lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.033628] env[63371]: DEBUG oslo_concurrency.lockutils [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] Acquired lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1896.033792] env[63371]: DEBUG nova.network.neutron [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Refreshing network info cache for port 3a849b63-f519-49c8-92b3-ad93796fcc9b {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1896.088733] env[63371]: DEBUG oslo_concurrency.lockutils [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.153698] env[63371]: DEBUG oslo_vmware.api [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775007, 'name': ReconfigVM_Task, 'duration_secs': 0.548158} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.154069] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfigured VM instance instance-00000070 to attach disk [datastore1] volume-0ae4dd79-9572-4361-935b-a03dac924bed/volume-0ae4dd79-9572-4361-935b-a03dac924bed.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1896.159376] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3a0b5b6-d196-4517-9c5f-b2de8c66103a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.175774] env[63371]: DEBUG oslo_vmware.api [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1896.175774] env[63371]: value = "task-1775008" [ 1896.175774] env[63371]: _type = "Task" [ 1896.175774] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.184047] env[63371]: DEBUG nova.compute.manager [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1896.186137] env[63371]: DEBUG oslo_vmware.api [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775008, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.241406] env[63371]: INFO nova.compute.manager [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Detaching volume bb7e9453-2717-44f0-aa95-77eb1f11cafe [ 1896.273406] env[63371]: INFO nova.virt.block_device [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Attempting to driver detach volume bb7e9453-2717-44f0-aa95-77eb1f11cafe from mountpoint /dev/sdc [ 1896.273734] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Volume detach. Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1896.273932] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368506', 'volume_id': 'bb7e9453-2717-44f0-aa95-77eb1f11cafe', 'name': 'volume-bb7e9453-2717-44f0-aa95-77eb1f11cafe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'da2e3b05-9cb0-49bb-8945-924e48cf3431', 'attached_at': '', 'detached_at': '', 'volume_id': 'bb7e9453-2717-44f0-aa95-77eb1f11cafe', 'serial': 'bb7e9453-2717-44f0-aa95-77eb1f11cafe'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1896.274849] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1d1fb0-6ece-4dd6-a3dc-95b491e2482a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.298905] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe8633a-8c1b-4b77-bee6-d53b8c70a820 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.305866] env[63371]: INFO nova.compute.manager [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Unrescuing [ 1896.306121] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1896.306274] env[63371]: DEBUG 
oslo_concurrency.lockutils [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquired lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1896.306439] env[63371]: DEBUG nova.network.neutron [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1896.309208] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b3e2e3-163f-4678-a21f-491e03d69654 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.334989] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82d7459-942c-4709-ba8c-8ded5ba7be5d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.350568] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] The volume has not been displaced from its original location: [datastore1] volume-bb7e9453-2717-44f0-aa95-77eb1f11cafe/volume-bb7e9453-2717-44f0-aa95-77eb1f11cafe.vmdk. No consolidation needed. {{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1896.356132] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Reconfiguring VM instance instance-0000006b to detach disk 2002 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1896.356534] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cae1842-3d4f-4de0-99d2-a8b1e9b9c53a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.375467] env[63371]: DEBUG oslo_vmware.api [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1896.375467] env[63371]: value = "task-1775009" [ 1896.375467] env[63371]: _type = "Task" [ 1896.375467] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.383887] env[63371]: DEBUG oslo_vmware.api [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1775009, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1896.689932] env[63371]: DEBUG oslo_vmware.api [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775008, 'name': ReconfigVM_Task, 'duration_secs': 0.153089} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.692148] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368508', 'volume_id': '0ae4dd79-9572-4361-935b-a03dac924bed', 'name': 'volume-0ae4dd79-9572-4361-935b-a03dac924bed', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '05f6f94a-c9c4-4737-8b07-77e9c2093497', 'attached_at': '', 'detached_at': '', 'volume_id': '0ae4dd79-9572-4361-935b-a03dac924bed', 'serial': '0ae4dd79-9572-4361-935b-a03dac924bed'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1896.707191] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.707467] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.709033] env[63371]: INFO nova.compute.claims [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1896.754419] env[63371]: DEBUG nova.network.neutron [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Added VIF to instance network info cache for port 3a849b63-f519-49c8-92b3-ad93796fcc9b. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3546}} [ 1896.754840] env[63371]: DEBUG nova.network.neutron [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updating instance_info_cache with network_info: [{"id": "2c2ab976-7609-4012-a826-68288c4f7f64", "address": "fa:16:3e:c1:5a:c9", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c2ab976-76", "ovs_interfaceid": "2c2ab976-7609-4012-a826-68288c4f7f64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3a849b63-f519-49c8-92b3-ad93796fcc9b", "address": "fa:16:3e:a9:f6:77", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a849b63-f5", "ovs_interfaceid": "3a849b63-f519-49c8-92b3-ad93796fcc9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.885100] env[63371]: DEBUG oslo_vmware.api [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1775009, 'name': ReconfigVM_Task, 'duration_secs': 0.380289} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.885382] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Reconfigured VM instance instance-0000006b to detach disk 2002 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1896.889952] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6ad945e-5ee1-42be-b5df-85489cbfe4f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.904394] env[63371]: DEBUG oslo_vmware.api [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1896.904394] env[63371]: value = "task-1775010" [ 1896.904394] env[63371]: _type = "Task" [ 1896.904394] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.913768] env[63371]: DEBUG oslo_vmware.api [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1775010, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.055834] env[63371]: DEBUG nova.network.neutron [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updating instance_info_cache with network_info: [{"id": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "address": "fa:16:3e:38:f8:9a", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaee0e3e-86", "ovs_interfaceid": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1897.257866] env[63371]: DEBUG oslo_concurrency.lockutils [req-89406316-049b-45aa-8cbb-884ac32846c1 req-0e569ff8-a50a-4ca6-ace7-f512c24516ce service nova] Releasing lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1897.258300] env[63371]: DEBUG oslo_concurrency.lockutils [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1897.258489] env[63371]: DEBUG nova.network.neutron [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1897.414820] env[63371]: DEBUG oslo_vmware.api [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1775010, 'name': ReconfigVM_Task, 'duration_secs': 0.259425} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.415097] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368506', 'volume_id': 'bb7e9453-2717-44f0-aa95-77eb1f11cafe', 'name': 'volume-bb7e9453-2717-44f0-aa95-77eb1f11cafe', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'da2e3b05-9cb0-49bb-8945-924e48cf3431', 'attached_at': '', 'detached_at': '', 'volume_id': 'bb7e9453-2717-44f0-aa95-77eb1f11cafe', 'serial': 'bb7e9453-2717-44f0-aa95-77eb1f11cafe'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1897.558638] env[63371]: DEBUG oslo_concurrency.lockutils [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Releasing lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1897.559324] env[63371]: DEBUG nova.objects.instance [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lazy-loading 'flavor' on Instance uuid ff724a9f-5e9a-4683-8eb3-058fb3639ea5 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1897.730516] env[63371]: DEBUG nova.objects.instance [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'flavor' on Instance uuid 05f6f94a-c9c4-4737-8b07-77e9c2093497 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1897.799670] env[63371]: WARNING nova.network.neutron [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] 78c77028-c23a-4160-8b08-d336e8101b3b already exists in list: networks containing: 
['78c77028-c23a-4160-8b08-d336e8101b3b']. ignoring it [ 1897.799670] env[63371]: WARNING nova.network.neutron [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] 78c77028-c23a-4160-8b08-d336e8101b3b already exists in list: networks containing: ['78c77028-c23a-4160-8b08-d336e8101b3b']. ignoring it [ 1897.799815] env[63371]: WARNING nova.network.neutron [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] 3a849b63-f519-49c8-92b3-ad93796fcc9b already exists in list: port_ids containing: ['3a849b63-f519-49c8-92b3-ad93796fcc9b']. ignoring it [ 1897.902449] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65117831-097d-49c9-a7b8-cac083d36fda {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.910392] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe817433-f082-45b0-979b-960dc5939d9f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.943163] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1730f064-68e0-460f-ad20-b5862edbf60e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.950844] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc9c6c0-628a-4a30-80b6-feae0c1e4142 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.965707] env[63371]: DEBUG nova.compute.provider_tree [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1897.968075] env[63371]: DEBUG nova.objects.instance [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'flavor' on Instance uuid da2e3b05-9cb0-49bb-8945-924e48cf3431 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1898.066143] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5261160e-9f51-41b0-9110-7d7adc6880c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.091206] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1898.093850] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ecbafa3a-902e-40a2-9585-36f9ae74f61d {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.101637] env[63371]: DEBUG oslo_vmware.api [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1898.101637] env[63371]: value = "task-1775011" [ 1898.101637] env[63371]: _type = "Task" [ 1898.101637] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.109741] env[63371]: DEBUG oslo_vmware.api [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775011, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.235927] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6239e3eb-7a27-42e5-8286-05e80ef937bb tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.255s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.291352] env[63371]: DEBUG nova.network.neutron [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updating instance_info_cache with network_info: [{"id": "2c2ab976-7609-4012-a826-68288c4f7f64", "address": "fa:16:3e:c1:5a:c9", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c2ab976-76", "ovs_interfaceid": "2c2ab976-7609-4012-a826-68288c4f7f64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "3a849b63-f519-49c8-92b3-ad93796fcc9b", "address": "fa:16:3e:a9:f6:77", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3a849b63-f5", "ovs_interfaceid": "3a849b63-f519-49c8-92b3-ad93796fcc9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1898.470782] env[63371]: DEBUG nova.scheduler.client.report [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1898.615759] env[63371]: DEBUG oslo_vmware.api [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775011, 'name': PowerOffVM_Task, 'duration_secs': 0.23438} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1898.616587] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1898.622468] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Reconfiguring VM instance instance-0000006f to detach disk 2002 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1898.623062] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58490267-0ff4-4585-80ac-9acefd098367 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.643686] env[63371]: DEBUG oslo_vmware.api [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1898.643686] env[63371]: value = "task-1775012" [ 1898.643686] env[63371]: _type = "Task" [ 1898.643686] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.652600] env[63371]: DEBUG oslo_vmware.api [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775012, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.794438] env[63371]: DEBUG oslo_concurrency.lockutils [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1898.795122] env[63371]: DEBUG oslo_concurrency.lockutils [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1898.795287] env[63371]: DEBUG oslo_concurrency.lockutils [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1898.796201] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25c471d-4d9e-466c-a5a8-1b1bcb25a42b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.814249] env[63371]: DEBUG nova.virt.hardware [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1898.814466] env[63371]: DEBUG nova.virt.hardware [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1898.814648] env[63371]: DEBUG nova.virt.hardware [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1898.814844] env[63371]: DEBUG nova.virt.hardware [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] 
Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1898.814989] env[63371]: DEBUG nova.virt.hardware [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1898.815152] env[63371]: DEBUG nova.virt.hardware [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1898.815353] env[63371]: DEBUG nova.virt.hardware [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1898.815505] env[63371]: DEBUG nova.virt.hardware [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1898.815666] env[63371]: DEBUG nova.virt.hardware [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1898.815828] env[63371]: DEBUG nova.virt.hardware [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1898.815995] env[63371]: DEBUG nova.virt.hardware [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1898.822160] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Reconfiguring VM to attach interface {{(pid=63371) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1898.822427] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b047519b-7fba-4a3b-9777-565ae836dc25 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.839655] env[63371]: DEBUG oslo_vmware.api [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1898.839655] env[63371]: value = "task-1775013" [ 1898.839655] env[63371]: _type = "Task" [ 1898.839655] 
env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1898.847984] env[63371]: DEBUG oslo_vmware.api [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775013, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1898.977116] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.269s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.977633] env[63371]: DEBUG nova.compute.manager [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1898.980357] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fdccf132-f8c0-42de-815e-ed84556e2cd4 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.243s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.110046] env[63371]: DEBUG nova.compute.manager [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Stashing vm_state: active {{(pid=63371) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1899.155027] env[63371]: DEBUG oslo_vmware.api [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775012, 'name': ReconfigVM_Task, 'duration_secs': 0.251939} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.155154] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Reconfigured VM instance instance-0000006f to detach disk 2002 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1899.155242] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1899.156336] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4a346bc-5689-4453-8b93-e876c8d538d0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.162043] env[63371]: DEBUG oslo_vmware.api [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1899.162043] env[63371]: value = "task-1775014" [ 1899.162043] env[63371]: _type = "Task" [ 1899.162043] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.172012] env[63371]: DEBUG oslo_vmware.api [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775014, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.349189] env[63371]: DEBUG oslo_vmware.api [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775013, 'name': ReconfigVM_Task, 'duration_secs': 0.502962} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.349684] env[63371]: DEBUG oslo_concurrency.lockutils [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1899.349897] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Reconfigured VM to attach interface {{(pid=63371) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1899.482796] env[63371]: DEBUG nova.compute.utils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1899.484508] env[63371]: DEBUG nova.compute.manager [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1899.484693] env[63371]: DEBUG nova.network.neutron [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1899.532836] env[63371]: DEBUG nova.policy [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '09b4f1693ef54996899c199362970fe3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '026682964c784968a24e654531c14aa9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1899.631970] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1899.632840] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1899.671701] 
env[63371]: DEBUG oslo_vmware.api [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775014, 'name': PowerOnVM_Task, 'duration_secs': 0.405586} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1899.671977] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1899.673145] env[63371]: DEBUG nova.compute.manager [None req-0cc848ef-2875-45f5-8e30-1cfa1dc20c2f tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1899.673967] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb19e27-eea9-49d2-8179-122926a2ce7d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.840084] env[63371]: DEBUG nova.network.neutron [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Successfully created port: 096fb9a4-ccdf-4930-a5cc-b5af3b649443 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1899.854919] env[63371]: DEBUG oslo_concurrency.lockutils [None req-357fde54-01bc-464b-ba04-0a51629fee03 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-e0fa0976-9a73-4b8b-b011-2e15199be5ff-3a849b63-f519-49c8-92b3-ad93796fcc9b" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.189s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.988045] env[63371]: DEBUG nova.compute.manager [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1900.125401] env[63371]: DEBUG oslo_concurrency.lockutils [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.125684] env[63371]: DEBUG oslo_concurrency.lockutils [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1900.125910] env[63371]: DEBUG oslo_concurrency.lockutils [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "da2e3b05-9cb0-49bb-8945-924e48cf3431-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1900.126534] env[63371]: DEBUG oslo_concurrency.lockutils [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1900.126730] env[63371]: DEBUG oslo_concurrency.lockutils [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1900.128713] env[63371]: INFO nova.compute.manager [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Terminating instance [ 1900.130318] env[63371]: DEBUG nova.compute.manager [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1900.130513] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1900.131349] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ef9664-8b5c-4e5c-8fee-ff3c9ca5bb12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.136738] env[63371]: INFO nova.compute.claims [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1900.142924] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1900.143373] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7ba02897-a42a-4cd7-abdb-d733e49b5415 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.149347] env[63371]: DEBUG oslo_vmware.api [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1900.149347] env[63371]: value = "task-1775015" [ 1900.149347] env[63371]: _type = "Task" [ 1900.149347] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.157558] env[63371]: DEBUG oslo_vmware.api [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1775015, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.646257] env[63371]: INFO nova.compute.resource_tracker [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating resource usage from migration 14f006ac-29e5-4cd6-9ce5-48006f177492 [ 1900.659290] env[63371]: DEBUG oslo_vmware.api [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1775015, 'name': PowerOffVM_Task, 'duration_secs': 0.186798} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.659591] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1900.659792] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1900.660073] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ae84c35c-7e1e-4ec9-b93e-4e60861f7999 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.737821] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1900.738074] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1900.738222] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Deleting the datastore file [datastore1] da2e3b05-9cb0-49bb-8945-924e48cf3431 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1900.738478] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8574d9f8-facf-4bed-9a3a-7720bb87a4a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.746080] env[63371]: DEBUG oslo_vmware.api [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for the task: (returnval){ [ 1900.746080] env[63371]: value = "task-1775017" [ 1900.746080] env[63371]: _type = "Task" [ 1900.746080] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.756506] env[63371]: DEBUG oslo_vmware.api [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1775017, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.833268] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f653fe0-b5d8-4484-865f-8d0971317964 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.841498] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce60cd2f-1f94-4b01-a7d1-2b0d1487c865 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.885358] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac124e79-4028-4bfc-81b5-6a643dc4744e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.896398] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03795996-e951-4a14-b6cf-5343bf0e75ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.911374] env[63371]: DEBUG nova.compute.provider_tree [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1900.997127] env[63371]: DEBUG nova.compute.manager [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1901.027792] env[63371]: DEBUG nova.virt.hardware [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1901.028068] env[63371]: DEBUG nova.virt.hardware [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1901.028263] env[63371]: DEBUG nova.virt.hardware [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1901.028454] env[63371]: DEBUG nova.virt.hardware [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1901.028599] env[63371]: DEBUG nova.virt.hardware [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1901.028743] env[63371]: DEBUG nova.virt.hardware [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1901.028943] env[63371]: DEBUG nova.virt.hardware [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1901.029123] env[63371]: DEBUG nova.virt.hardware [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1901.029291] env[63371]: DEBUG nova.virt.hardware [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1901.029471] env[63371]: DEBUG nova.virt.hardware [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1901.029716] env[63371]: DEBUG nova.virt.hardware [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1901.030600] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f72c660d-937e-4b3d-8336-1bd1af355f10 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.038581] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bdb92bc-2b3f-4ef3-baaf-bcccb912b8f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.089180] env[63371]: DEBUG nova.compute.manager [req-96f03511-6f0e-4f40-9bfb-654a9200ee0d req-5b84cbcd-68d8-4c2b-9fba-67635665bdcd service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Received event network-changed-baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1901.089534] env[63371]: DEBUG nova.compute.manager [req-96f03511-6f0e-4f40-9bfb-654a9200ee0d req-5b84cbcd-68d8-4c2b-9fba-67635665bdcd service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Refreshing instance network info cache due to event network-changed-baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1901.089706] env[63371]: DEBUG oslo_concurrency.lockutils [req-96f03511-6f0e-4f40-9bfb-654a9200ee0d req-5b84cbcd-68d8-4c2b-9fba-67635665bdcd service nova] Acquiring lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.089849] env[63371]: DEBUG oslo_concurrency.lockutils [req-96f03511-6f0e-4f40-9bfb-654a9200ee0d req-5b84cbcd-68d8-4c2b-9fba-67635665bdcd service nova] Acquired lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.089914] env[63371]: DEBUG nova.network.neutron [req-96f03511-6f0e-4f40-9bfb-654a9200ee0d req-5b84cbcd-68d8-4c2b-9fba-67635665bdcd service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Refreshing network info cache for port baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1901.257711] env[63371]: DEBUG oslo_vmware.api [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Task: {'id': task-1775017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15772} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.257859] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1901.258060] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1901.258235] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1901.258405] env[63371]: INFO nova.compute.manager [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1901.258642] env[63371]: DEBUG oslo.service.loopingcall [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1901.258834] env[63371]: DEBUG nova.compute.manager [-] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1901.259041] env[63371]: DEBUG nova.network.neutron [-] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1901.418421] env[63371]: DEBUG nova.scheduler.client.report [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1901.439318] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "interface-e0fa0976-9a73-4b8b-b011-2e15199be5ff-3a849b63-f519-49c8-92b3-ad93796fcc9b" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1901.439578] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-e0fa0976-9a73-4b8b-b011-2e15199be5ff-3a849b63-f519-49c8-92b3-ad93796fcc9b" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.447111] env[63371]: DEBUG nova.network.neutron [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Successfully updated port: 096fb9a4-ccdf-4930-a5cc-b5af3b649443 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1901.784807] env[63371]: DEBUG oslo_concurrency.lockutils [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1901.784807] env[63371]: DEBUG oslo_concurrency.lockutils [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.784807] env[63371]: DEBUG oslo_concurrency.lockutils [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1901.784807] env[63371]: DEBUG oslo_concurrency.lockutils [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1901.784807] env[63371]: DEBUG oslo_concurrency.lockutils [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.785271] env[63371]: INFO nova.compute.manager [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Terminating instance [ 1901.789830] env[63371]: DEBUG nova.compute.manager [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1901.790417] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1901.791612] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6a320a-fb4a-497d-927a-eb3f0827a529 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.804753] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1901.804753] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-198d09a3-bd14-486e-8720-0873ec8054b9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.806631] env[63371]: DEBUG nova.compute.manager [req-9ac609d1-b1ee-4ed6-a1ac-6f83d0afb081 req-2233950b-960e-4071-81f7-cf6deff42be0 service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Received event network-vif-deleted-a22c781d-8374-4914-8e01-d61b8df475a7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1901.806918] env[63371]: INFO nova.compute.manager [req-9ac609d1-b1ee-4ed6-a1ac-6f83d0afb081 req-2233950b-960e-4071-81f7-cf6deff42be0 service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Neutron deleted interface a22c781d-8374-4914-8e01-d61b8df475a7; detaching it from the instance and deleting it from the info cache [ 1901.808113] env[63371]: DEBUG nova.network.neutron [req-9ac609d1-b1ee-4ed6-a1ac-6f83d0afb081 req-2233950b-960e-4071-81f7-cf6deff42be0 service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.815475] env[63371]: DEBUG oslo_vmware.api [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1901.815475] env[63371]: value = "task-1775018" [ 1901.815475] env[63371]: _type = "Task" [ 1901.815475] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.826129] env[63371]: DEBUG oslo_vmware.api [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775018, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.877933] env[63371]: DEBUG nova.network.neutron [req-96f03511-6f0e-4f40-9bfb-654a9200ee0d req-5b84cbcd-68d8-4c2b-9fba-67635665bdcd service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updated VIF entry in instance network info cache for port baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1901.877933] env[63371]: DEBUG nova.network.neutron [req-96f03511-6f0e-4f40-9bfb-654a9200ee0d req-5b84cbcd-68d8-4c2b-9fba-67635665bdcd service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updating instance_info_cache with network_info: [{"id": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "address": "fa:16:3e:38:f8:9a", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaee0e3e-86", "ovs_interfaceid": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1901.924039] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.291s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1901.924248] env[63371]: INFO nova.compute.manager [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Migrating [ 1901.947551] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.947551] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.948710] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e25cfa6-3007-4561-9622-b13c59a7a7a2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.952599] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 
tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "refresh_cache-37c33e03-30c7-4cf4-99a1-360d892dde2d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1901.952599] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquired lock "refresh_cache-37c33e03-30c7-4cf4-99a1-360d892dde2d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1901.952806] env[63371]: DEBUG nova.network.neutron [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1901.984836] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba49f435-8450-478b-878b-218dff429e74 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.023114] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Reconfiguring VM to detach interface {{(pid=63371) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1902.023968] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0c6215b-ef8d-4ccb-9f43-cbce34084245 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.043471] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1902.043471] env[63371]: value = "task-1775019" [ 1902.043471] env[63371]: _type = "Task" [ 1902.043471] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.054299] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775019, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.286086] env[63371]: DEBUG nova.network.neutron [-] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.311292] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2b6921ae-d39b-4eb5-a03a-181fc872fcd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.324343] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94467d3a-10bf-47fa-bfad-efaa1e2b0fd2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.337663] env[63371]: DEBUG oslo_vmware.api [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775018, 'name': PowerOffVM_Task, 'duration_secs': 0.229249} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.338318] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1902.338547] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1902.338820] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe3cec22-325d-4259-860b-15f08b92c124 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.360028] env[63371]: DEBUG nova.compute.manager [req-9ac609d1-b1ee-4ed6-a1ac-6f83d0afb081 req-2233950b-960e-4071-81f7-cf6deff42be0 service nova] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Detach interface failed, port_id=a22c781d-8374-4914-8e01-d61b8df475a7, reason: Instance da2e3b05-9cb0-49bb-8945-924e48cf3431 could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1902.380392] env[63371]: DEBUG oslo_concurrency.lockutils [req-96f03511-6f0e-4f40-9bfb-654a9200ee0d req-5b84cbcd-68d8-4c2b-9fba-67635665bdcd service nova] Releasing lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1902.411344] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1902.411565] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1902.411745] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleting the datastore file [datastore1] 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1902.412030] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-769d6e61-e8c0-44d5-ad3e-85e7f6b829a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.418337] env[63371]: DEBUG oslo_vmware.api [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1902.418337] env[63371]: value = "task-1775021" [ 1902.418337] env[63371]: _type = "Task" [ 1902.418337] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.426820] env[63371]: DEBUG oslo_vmware.api [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775021, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.440927] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.441164] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.441397] env[63371]: DEBUG nova.network.neutron [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1902.485956] env[63371]: DEBUG nova.network.neutron [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1902.556418] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775019, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.587053] env[63371]: DEBUG oslo_concurrency.lockutils [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "9885de9e-c640-4d82-a47a-980988d89deb" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1902.587348] env[63371]: DEBUG oslo_concurrency.lockutils [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "9885de9e-c640-4d82-a47a-980988d89deb" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1902.636976] env[63371]: DEBUG nova.network.neutron [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Updating instance_info_cache with network_info: [{"id": "096fb9a4-ccdf-4930-a5cc-b5af3b649443", "address": "fa:16:3e:81:0c:d3", "network": {"id": "37c74e18-5c2a-4df3-b429-8a4fb9f29cc0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1560662466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "026682964c784968a24e654531c14aa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap096fb9a4-cc", "ovs_interfaceid": "096fb9a4-ccdf-4930-a5cc-b5af3b649443", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1902.788632] env[63371]: INFO nova.compute.manager [-] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Took 1.53 seconds to deallocate network for instance. [ 1902.929890] env[63371]: DEBUG oslo_vmware.api [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775021, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141757} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.930179] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1902.930350] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1902.930492] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1902.930669] env[63371]: INFO nova.compute.manager [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1902.930910] env[63371]: DEBUG oslo.service.loopingcall [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1902.931119] env[63371]: DEBUG nova.compute.manager [-] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1902.931214] env[63371]: DEBUG nova.network.neutron [-] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1903.054494] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775019, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.090579] env[63371]: INFO nova.compute.manager [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Detaching volume 74095c08-847f-4b4a-b107-0d7acbea84a7 [ 1903.121990] env[63371]: DEBUG nova.compute.manager [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Received event network-changed-baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1903.122265] env[63371]: DEBUG nova.compute.manager [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Refreshing instance network info cache due to event network-changed-baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1903.122527] env[63371]: DEBUG oslo_concurrency.lockutils [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] Acquiring lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1903.122680] env[63371]: DEBUG oslo_concurrency.lockutils [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] Acquired lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.122839] env[63371]: DEBUG nova.network.neutron [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Refreshing network info cache for port baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1903.138125] env[63371]: INFO nova.virt.block_device [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Attempting to driver detach volume 74095c08-847f-4b4a-b107-0d7acbea84a7 from mountpoint /dev/sdb [ 1903.138125] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Volume detach. 
Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1903.138125] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368492', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'name': 'volume-74095c08-847f-4b4a-b107-0d7acbea84a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9885de9e-c640-4d82-a47a-980988d89deb', 'attached_at': '', 'detached_at': '', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'serial': '74095c08-847f-4b4a-b107-0d7acbea84a7'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1903.138695] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f880f7e-a388-4dd8-a656-89753ca95d18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.141775] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Releasing lock "refresh_cache-37c33e03-30c7-4cf4-99a1-360d892dde2d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.142066] env[63371]: DEBUG nova.compute.manager [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Instance network_info: |[{"id": "096fb9a4-ccdf-4930-a5cc-b5af3b649443", "address": "fa:16:3e:81:0c:d3", "network": {"id": "37c74e18-5c2a-4df3-b429-8a4fb9f29cc0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1560662466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "026682964c784968a24e654531c14aa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap096fb9a4-cc", "ovs_interfaceid": "096fb9a4-ccdf-4930-a5cc-b5af3b649443", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1903.142680] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:0c:d3', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1e7a4976-597e-4636-990e-6062b5faadee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '096fb9a4-ccdf-4930-a5cc-b5af3b649443', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1903.150239] env[63371]: DEBUG oslo.service.loopingcall [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1903.151256] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1903.151787] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a126b939-7a8b-4acb-b6c8-656ea36441f5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.186379] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d6bd83-ce06-40cd-bfe4-8017507cd90c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.190507] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1903.190507] env[63371]: value = "task-1775022" [ 1903.190507] env[63371]: _type = "Task" [ 1903.190507] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.195777] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a7ddcd-c42f-44d0-94a9-21bc156fb41a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.200675] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775022, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.220748] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e137b05c-6a7a-4cd3-93b8-a06e2b74ed52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.235663] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] The volume has not been displaced from its original location: [datastore1] volume-74095c08-847f-4b4a-b107-0d7acbea84a7/volume-74095c08-847f-4b4a-b107-0d7acbea84a7.vmdk. No consolidation needed. 
{{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1903.240743] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1903.241096] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df6d48a2-1f23-4b50-97fb-eb51e88ecb61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.258499] env[63371]: DEBUG nova.network.neutron [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance_info_cache with network_info: [{"id": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "address": "fa:16:3e:b6:f8:d2", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a88ea10-92", "ovs_interfaceid": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.262077] env[63371]: DEBUG oslo_vmware.api [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1903.262077] env[63371]: value = "task-1775023" [ 1903.262077] env[63371]: _type = "Task" [ 1903.262077] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.272207] env[63371]: DEBUG oslo_vmware.api [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775023, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.295696] env[63371]: DEBUG oslo_concurrency.lockutils [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1903.295696] env[63371]: DEBUG oslo_concurrency.lockutils [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.295897] env[63371]: DEBUG nova.objects.instance [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lazy-loading 'resources' on Instance uuid da2e3b05-9cb0-49bb-8945-924e48cf3431 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1903.554638] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775019, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.700477] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775022, 'name': CreateVM_Task, 'duration_secs': 0.363041} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.700668] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1903.701351] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1903.701524] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.701854] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1903.702113] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c4874a9-f7da-48e8-8b89-3b537d6004e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.706966] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1903.706966] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520344e3-27a7-c173-fe7c-1e2c17988aa3" [ 1903.706966] env[63371]: _type = "Task" [ 1903.706966] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.715307] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520344e3-27a7-c173-fe7c-1e2c17988aa3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.765694] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.771812] env[63371]: DEBUG oslo_vmware.api [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775023, 'name': ReconfigVM_Task, 'duration_secs': 0.277428} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.772073] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1903.776642] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f63dc0ee-52cb-4c92-9e9e-7810ab31acc4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.795311] env[63371]: DEBUG oslo_vmware.api [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1903.795311] env[63371]: value = "task-1775024" [ 1903.795311] env[63371]: _type = "Task" [ 1903.795311] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.805753] env[63371]: DEBUG oslo_vmware.api [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775024, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.884765] env[63371]: DEBUG nova.network.neutron [-] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1903.915634] env[63371]: DEBUG nova.network.neutron [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updated VIF entry in instance network info cache for port baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1903.916016] env[63371]: DEBUG nova.network.neutron [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updating instance_info_cache with network_info: [{"id": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "address": "fa:16:3e:38:f8:9a", "network": {"id": "3d3cf46b-7ed3-46e8-98f4-beee21f1c9dd", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1413439664-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.180", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5942787fa9584e8fbf5ddd459907ce5d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbaee0e3e-86", "ovs_interfaceid": "baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.004630] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61502872-03f2-4904-926a-d43678b1c4c1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.012446] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe39f86-7f5e-405c-a2f7-2e4b26ade051 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.042878] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33067e4-e5db-47c3-bf9f-6bd744ef2c45 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.052659] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645457b3-afe0-4f3b-84b9-a625da957198 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.059270] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775019, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.068736] env[63371]: DEBUG nova.compute.provider_tree [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1904.218492] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520344e3-27a7-c173-fe7c-1e2c17988aa3, 'name': SearchDatastore_Task, 'duration_secs': 0.00995} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.218756] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.219028] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1904.219240] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.219387] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.219562] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1904.219824] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-372112bd-0ec7-408e-aaaa-8ddc5fc34079 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.228993] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 
tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1904.229273] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1904.229935] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbab289c-9be2-4dfd-9e5b-f37147a1308f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.235648] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1904.235648] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5208a00c-f35f-f5d4-9a8a-b38bf43e367f" [ 1904.235648] env[63371]: _type = "Task" [ 1904.235648] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.243087] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5208a00c-f35f-f5d4-9a8a-b38bf43e367f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.307508] env[63371]: DEBUG oslo_vmware.api [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775024, 'name': ReconfigVM_Task, 'duration_secs': 0.148485} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.307907] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368492', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'name': 'volume-74095c08-847f-4b4a-b107-0d7acbea84a7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '9885de9e-c640-4d82-a47a-980988d89deb', 'attached_at': '', 'detached_at': '', 'volume_id': '74095c08-847f-4b4a-b107-0d7acbea84a7', 'serial': '74095c08-847f-4b4a-b107-0d7acbea84a7'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1904.388364] env[63371]: INFO nova.compute.manager [-] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Took 1.46 seconds to deallocate network for instance. 
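[editor's note] The records above and below repeat one pattern: an oslo.vmware call (CreateVM_Task, ReconfigVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ...) returns a Task managed object, the caller blocks in wait_for_task (oslo_vmware/api.py:397) while _poll_task (api.py:434) logs "progress is N%" until the task reports completion, e.g. task-1775022 CreateVM_Task finishing in 0.363s. The sketch below is an editorial illustration of that poll-until-done loop only; it is not the oslo.vmware implementation, and get_task_info() is a hypothetical stand-in for the PropertyCollector round-trip the real code performs.

    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        # get_task_info() is assumed to return an object with
        # .state ('running', 'success' or 'error'), .progress and .error,
        # mirroring the TaskInfo fields the log lines report.
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()
            if info.state == "success":
                return info                      # e.g. "CreateVM_Task ... completed successfully"
            if info.state == "error":
                raise RuntimeError(info.error)   # surface the vCenter fault to the caller
            # still queued/running: report progress and poll again,
            # like the repeated "progress is 0%/6%/14%" records above
            print(f"progress is {info.progress or 0}%")
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete in time")

[end editor's note]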
[ 1904.420859] env[63371]: DEBUG oslo_concurrency.lockutils [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] Releasing lock "refresh_cache-ff724a9f-5e9a-4683-8eb3-058fb3639ea5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.421211] env[63371]: DEBUG nova.compute.manager [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Received event network-vif-plugged-096fb9a4-ccdf-4930-a5cc-b5af3b649443 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1904.421462] env[63371]: DEBUG oslo_concurrency.lockutils [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] Acquiring lock "37c33e03-30c7-4cf4-99a1-360d892dde2d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.421729] env[63371]: DEBUG oslo_concurrency.lockutils [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] Lock "37c33e03-30c7-4cf4-99a1-360d892dde2d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.421942] env[63371]: DEBUG oslo_concurrency.lockutils [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] Lock "37c33e03-30c7-4cf4-99a1-360d892dde2d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1904.422173] env[63371]: DEBUG nova.compute.manager [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] No waiting events found dispatching network-vif-plugged-096fb9a4-ccdf-4930-a5cc-b5af3b649443 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1904.422395] env[63371]: WARNING nova.compute.manager [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Received unexpected event network-vif-plugged-096fb9a4-ccdf-4930-a5cc-b5af3b649443 for instance with vm_state building and task_state spawning. [ 1904.422611] env[63371]: DEBUG nova.compute.manager [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Received event network-changed-096fb9a4-ccdf-4930-a5cc-b5af3b649443 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1904.422824] env[63371]: DEBUG nova.compute.manager [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Refreshing instance network info cache due to event network-changed-096fb9a4-ccdf-4930-a5cc-b5af3b649443. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1904.423084] env[63371]: DEBUG oslo_concurrency.lockutils [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] Acquiring lock "refresh_cache-37c33e03-30c7-4cf4-99a1-360d892dde2d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.423273] env[63371]: DEBUG oslo_concurrency.lockutils [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] Acquired lock "refresh_cache-37c33e03-30c7-4cf4-99a1-360d892dde2d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.423485] env[63371]: DEBUG nova.network.neutron [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Refreshing network info cache for port 096fb9a4-ccdf-4930-a5cc-b5af3b649443 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1904.556306] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775019, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.571649] env[63371]: DEBUG nova.scheduler.client.report [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1904.746145] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5208a00c-f35f-f5d4-9a8a-b38bf43e367f, 'name': SearchDatastore_Task, 'duration_secs': 0.008314} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.746927] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49806b42-05ba-4448-a84e-6f3b2ad7b0e8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.751867] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1904.751867] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52449e6e-4647-c45c-69e9-bfb582393e14" [ 1904.751867] env[63371]: _type = "Task" [ 1904.751867] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.759936] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52449e6e-4647-c45c-69e9-bfb582393e14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.855549] env[63371]: DEBUG nova.objects.instance [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lazy-loading 'flavor' on Instance uuid 9885de9e-c640-4d82-a47a-980988d89deb {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1904.895718] env[63371]: DEBUG oslo_concurrency.lockutils [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.058652] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775019, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.075968] env[63371]: DEBUG oslo_concurrency.lockutils [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.780s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.078028] env[63371]: DEBUG oslo_concurrency.lockutils [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.182s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.078512] env[63371]: DEBUG nova.objects.instance [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lazy-loading 'resources' on Instance uuid 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1905.101485] env[63371]: INFO nova.scheduler.client.report [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Deleted allocations for instance da2e3b05-9cb0-49bb-8945-924e48cf3431 [ 1905.145544] env[63371]: DEBUG nova.network.neutron [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Updated VIF entry in instance network info cache for port 096fb9a4-ccdf-4930-a5cc-b5af3b649443. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1905.145544] env[63371]: DEBUG nova.network.neutron [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Updating instance_info_cache with network_info: [{"id": "096fb9a4-ccdf-4930-a5cc-b5af3b649443", "address": "fa:16:3e:81:0c:d3", "network": {"id": "37c74e18-5c2a-4df3-b429-8a4fb9f29cc0", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1560662466-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "026682964c784968a24e654531c14aa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1e7a4976-597e-4636-990e-6062b5faadee", "external-id": "nsx-vlan-transportzone-847", "segmentation_id": 847, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap096fb9a4-cc", "ovs_interfaceid": "096fb9a4-ccdf-4930-a5cc-b5af3b649443", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1905.153158] env[63371]: DEBUG nova.compute.manager [req-8eda186e-cd3b-412a-93fb-3dfe08cfc5c5 req-26d68d51-6b2b-4c23-bd56-8f6416da4c0a service nova] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Received event network-vif-deleted-45c89cd7-4637-40af-9652-42cad1269c7e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1905.263258] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52449e6e-4647-c45c-69e9-bfb582393e14, 'name': SearchDatastore_Task, 'duration_secs': 0.010254} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.263526] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.263801] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 37c33e03-30c7-4cf4-99a1-360d892dde2d/37c33e03-30c7-4cf4-99a1-360d892dde2d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1905.264124] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d92eb7a5-b2e1-4707-970a-be0bd40a28ad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.271479] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1905.271479] env[63371]: value = "task-1775025" [ 1905.271479] env[63371]: _type = "Task" [ 1905.271479] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.281803] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775025, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.283157] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a53d9e-5384-4d3d-839b-394e0a350aeb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.303630] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance '05f6f94a-c9c4-4737-8b07-77e9c2093497' progress to 0 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1905.335596] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "cfa04c51-c077-4f16-ae57-e54d62aac044" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.335826] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.558865] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775019, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.612153] env[63371]: DEBUG oslo_concurrency.lockutils [None req-201ae385-0a86-4af9-aea3-8c935b8c0516 tempest-AttachVolumeTestJSON-810026873 tempest-AttachVolumeTestJSON-810026873-project-member] Lock "da2e3b05-9cb0-49bb-8945-924e48cf3431" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.486s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.648402] env[63371]: DEBUG oslo_concurrency.lockutils [req-07265c0f-9c9c-4f80-a6a8-20faa5270263 req-c3f47b8d-7d8b-4e5a-bfaf-0939776248db service nova] Releasing lock "refresh_cache-37c33e03-30c7-4cf4-99a1-360d892dde2d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.769511] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2183db2b-1e9d-4933-ab81-b0ad5129c1b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.784518] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0240a415-e09a-45de-9b01-1671a14b41dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.787530] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775025, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500223} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.787788] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 37c33e03-30c7-4cf4-99a1-360d892dde2d/37c33e03-30c7-4cf4-99a1-360d892dde2d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1905.787998] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1905.788594] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-edd38e12-5bed-4d21-93d9-ca3bd64f352a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.817654] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1905.819363] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-951de638-44bb-41bd-b4cf-942dd3cf76df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.821554] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4623a6-7cc8-4484-ab06-37bd95fe4c02 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.824145] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1905.824145] env[63371]: value = "task-1775027" [ 1905.824145] env[63371]: _type = "Task" [ 1905.824145] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.831033] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1905.831033] env[63371]: value = "task-1775028" [ 1905.831033] env[63371]: _type = "Task" [ 1905.831033] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.832461] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385beb32-30f8-4156-afd3-cb844d89501e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.842127] env[63371]: DEBUG nova.compute.utils [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1905.843396] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775027, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.856254] env[63371]: DEBUG nova.compute.provider_tree [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1905.862376] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775028, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.862569] env[63371]: DEBUG oslo_concurrency.lockutils [None req-739c42bd-0be5-4255-a940-5f1243ba8b40 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "9885de9e-c640-4d82-a47a-980988d89deb" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.275s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.058084] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775019, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.333854] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775027, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071384} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.334234] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1906.334918] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8bb7d45-2a87-4cb5-96b8-01d984d07e19 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.354728] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.019s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.363578] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 37c33e03-30c7-4cf4-99a1-360d892dde2d/37c33e03-30c7-4cf4-99a1-360d892dde2d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1906.367959] env[63371]: DEBUG nova.scheduler.client.report [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 
'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1906.370780] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19eb48fb-7af9-4fca-b8c4-ed0e52457f31 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.385274] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775028, 'name': PowerOffVM_Task, 'duration_secs': 0.211038} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.386573] env[63371]: DEBUG oslo_concurrency.lockutils [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.309s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.389118] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1906.389321] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance '05f6f94a-c9c4-4737-8b07-77e9c2093497' progress to 17 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1906.398591] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1906.398591] env[63371]: value = "task-1775029" [ 1906.398591] env[63371]: _type = "Task" [ 1906.398591] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.407147] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775029, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.410887] env[63371]: INFO nova.scheduler.client.report [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleted allocations for instance 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd [ 1906.559085] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775019, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.846624] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "9885de9e-c640-4d82-a47a-980988d89deb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.846976] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "9885de9e-c640-4d82-a47a-980988d89deb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.847303] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "9885de9e-c640-4d82-a47a-980988d89deb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.847548] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "9885de9e-c640-4d82-a47a-980988d89deb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.847712] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "9885de9e-c640-4d82-a47a-980988d89deb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.850036] env[63371]: INFO nova.compute.manager [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Terminating instance [ 1906.852179] env[63371]: DEBUG nova.compute.manager [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1906.852254] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1906.853536] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-625c99fb-8ddd-47a5-89e9-92963ae37b76 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.860976] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1906.861137] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35fcbfba-0d23-4ef4-849f-1a5d12e2b900 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.867745] env[63371]: DEBUG oslo_vmware.api [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1906.867745] env[63371]: value = "task-1775030" [ 1906.867745] env[63371]: _type = "Task" [ 1906.867745] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.876286] env[63371]: DEBUG oslo_vmware.api [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775030, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.896058] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1906.896058] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1906.896299] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1906.896299] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1906.896462] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1906.896574] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1906.896782] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1906.896940] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1906.897122] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] 
Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1906.897287] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1906.897457] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1906.903226] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fd4c498-59d7-4785-a52d-c65f9ed816fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.921908] env[63371]: DEBUG oslo_concurrency.lockutils [None req-15c88f06-8481-426e-9e2d-f3da53377804 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "3e2f17e7-8c9c-47c0-afb1-55e56eab74fd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.140s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1906.928651] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775029, 'name': ReconfigVM_Task, 'duration_secs': 0.282056} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.930126] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 37c33e03-30c7-4cf4-99a1-360d892dde2d/37c33e03-30c7-4cf4-99a1-360d892dde2d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1906.930813] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1906.930813] env[63371]: value = "task-1775031" [ 1906.930813] env[63371]: _type = "Task" [ 1906.930813] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.931047] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab7ea2f9-cfef-4c9b-988e-7e30bdb065d5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.942529] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775031, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.943904] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1906.943904] env[63371]: value = "task-1775032" [ 1906.943904] env[63371]: _type = "Task" [ 1906.943904] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.953202] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775032, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.060175] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775019, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.380281] env[63371]: DEBUG oslo_vmware.api [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775030, 'name': PowerOffVM_Task, 'duration_secs': 0.275754} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.380651] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1907.380713] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1907.380962] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2ed0b2a-08d3-481e-92cb-959bd4d7c8e7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.424867] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "cfa04c51-c077-4f16-ae57-e54d62aac044" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.425188] env[63371]: DEBUG oslo_concurrency.lockutils [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.425431] env[63371]: INFO nova.compute.manager [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Attaching volume 0dab7eed-b384-4a7b-8d58-1cc47753a664 to /dev/sdb [ 1907.443177] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775031, 'name': ReconfigVM_Task, 'duration_secs': 0.231151} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.443513] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance '05f6f94a-c9c4-4737-8b07-77e9c2093497' progress to 33 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1907.455899] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775032, 'name': Rename_Task, 'duration_secs': 0.158158} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.456195] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1907.456444] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5499599b-488e-43d3-9595-2251d9918f59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.461203] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93510bf8-723d-4ae1-b94c-7a0b2780e84a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.465397] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1907.465397] env[63371]: value = "task-1775034" [ 1907.465397] env[63371]: _type = "Task" [ 1907.465397] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.474776] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e03b03a-2bf9-4bf6-8d36-98e137f8e00b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.481833] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775034, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.483136] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1907.483338] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1907.483499] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleting the datastore file [datastore1] 9885de9e-c640-4d82-a47a-980988d89deb {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1907.483803] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a37336c-9afa-4c8c-869b-bc27ed3a0d8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.491112] env[63371]: DEBUG oslo_vmware.api [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1907.491112] env[63371]: value = "task-1775035" [ 1907.491112] env[63371]: _type = "Task" [ 1907.491112] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.495828] env[63371]: DEBUG nova.virt.block_device [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Updating existing volume attachment record: a6296b0d-f327-4d4e-bf80-eb9be3140bf1 {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1907.506587] env[63371]: DEBUG oslo_vmware.api [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775035, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.560917] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775019, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.952204] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1907.952204] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1907.952204] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1907.952204] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1907.952838] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1907.953246] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1907.954049] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1907.954049] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 
tempest-ServerActionsTestOtherB-610614522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1907.954304] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1907.954372] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1907.954571] env[63371]: DEBUG nova.virt.hardware [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1907.963014] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1907.964057] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b2dbb12-ca1d-4bec-ab7a-d11e51bd59db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.988083] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775034, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.989588] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1907.989588] env[63371]: value = "task-1775037" [ 1907.989588] env[63371]: _type = "Task" [ 1907.989588] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.997783] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775037, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.004920] env[63371]: DEBUG oslo_vmware.api [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775035, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.223733} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.005223] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1908.005479] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1908.005708] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1908.005961] env[63371]: INFO nova.compute.manager [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1908.006326] env[63371]: DEBUG oslo.service.loopingcall [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1908.006587] env[63371]: DEBUG nova.compute.manager [-] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1908.006729] env[63371]: DEBUG nova.network.neutron [-] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1908.060699] env[63371]: DEBUG oslo_vmware.api [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775019, 'name': ReconfigVM_Task, 'duration_secs': 5.764} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.060966] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1908.061201] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Reconfigured VM to detach interface {{(pid=63371) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1908.493231] env[63371]: DEBUG oslo_vmware.api [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775034, 'name': PowerOnVM_Task, 'duration_secs': 0.533928} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.497526] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1908.497906] env[63371]: INFO nova.compute.manager [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Took 7.50 seconds to spawn the instance on the hypervisor. [ 1908.497906] env[63371]: DEBUG nova.compute.manager [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1908.499174] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f617fb14-56d1-4c6d-ac3a-2e27122b7a67 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.506852] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775037, 'name': ReconfigVM_Task, 'duration_secs': 0.274281} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.509264] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1908.512887] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-920ada5e-f4bf-445f-a41c-a0cf127fb83b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.518368] env[63371]: DEBUG nova.compute.manager [req-b8e872b9-cec4-4216-a632-009660420fc5 req-4630d258-3c5f-474c-9a11-fe5992b4a72c service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Received event network-vif-deleted-f5b22240-e8c4-447a-bc92-3a83ae9674ec {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1908.518570] env[63371]: INFO nova.compute.manager [req-b8e872b9-cec4-4216-a632-009660420fc5 req-4630d258-3c5f-474c-9a11-fe5992b4a72c service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Neutron deleted interface f5b22240-e8c4-447a-bc92-3a83ae9674ec; detaching it from the instance and deleting it from the info cache [ 1908.518768] env[63371]: DEBUG nova.network.neutron [req-b8e872b9-cec4-4216-a632-009660420fc5 req-4630d258-3c5f-474c-9a11-fe5992b4a72c service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.545212] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 05f6f94a-c9c4-4737-8b07-77e9c2093497/05f6f94a-c9c4-4737-8b07-77e9c2093497.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1908.546633] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2e12275-6dd6-4e07-bdbe-00c332e01184 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.559511] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4577d922-224b-4e0e-9bf8-0a25af9241ee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.570172] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcce77c2-198b-4314-9b60-883525ec3615 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.580635] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1908.580635] env[63371]: value = "task-1775038" [ 1908.580635] env[63371]: _type = "Task" [ 1908.580635] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.589167] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.601407] env[63371]: DEBUG nova.compute.manager [req-b8e872b9-cec4-4216-a632-009660420fc5 req-4630d258-3c5f-474c-9a11-fe5992b4a72c service nova] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Detach interface failed, port_id=f5b22240-e8c4-447a-bc92-3a83ae9674ec, reason: Instance 9885de9e-c640-4d82-a47a-980988d89deb could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1908.766298] env[63371]: DEBUG nova.compute.manager [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Stashing vm_state: active {{(pid=63371) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1908.999451] env[63371]: DEBUG nova.network.neutron [-] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1909.023336] env[63371]: INFO nova.compute.manager [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Took 12.34 seconds to build instance. [ 1909.093008] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775038, 'name': ReconfigVM_Task, 'duration_secs': 0.291935} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.093452] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 05f6f94a-c9c4-4737-8b07-77e9c2093497/05f6f94a-c9c4-4737-8b07-77e9c2093497.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1909.093793] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance '05f6f94a-c9c4-4737-8b07-77e9c2093497' progress to 50 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1909.287246] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1909.287355] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.481035] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1909.481035] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquired lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1909.481035] env[63371]: DEBUG nova.network.neutron [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1909.504208] env[63371]: INFO nova.compute.manager [-] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Took 1.50 seconds to deallocate network for instance. 
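The records above trace the VMware destroy path for instance 9885de9e-c640-4d82-a47a-980988d89deb: power off the VM, unregister it, delete its datastore directory, then deallocate the Neutron ports, with every vCenter task (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, ReconfigVM_Task) polled until it reaches a terminal state and its duration logged. A minimal sketch of that poll-until-complete pattern is shown below; it is illustrative only, in the spirit of oslo_vmware's wait_for_task, and the helper names (get_task_info, TaskFailed) and the poll interval are assumptions for the example, not the real oslo.vmware API.

    # Illustrative sketch of polling a vCenter task until completion, mirroring the
    # "Task: {'id': ..., 'name': ...} progress is N%" / "completed successfully"
    # records in the log above. Not the oslo.vmware implementation.
    import time

    POLL_INTERVAL = 0.5  # seconds between polls (assumed value)

    class TaskFailed(Exception):
        """Raised when the vCenter task ends in an error state."""

    def wait_for_task(get_task_info, task_ref):
        """Poll a task until it succeeds or fails.

        get_task_info is assumed to return an object with .state, .progress
        and .error attributes, analogous to the TaskInfo data logged above.
        """
        while True:
            info = get_task_info(task_ref)
            if info.state == 'success':
                return info          # caller logs "completed successfully"
            if info.state == 'error':
                raise TaskFailed(info.error)
            # 'queued' or 'running': report progress and retry, as the
            # repeated "progress is N%" DEBUG records show.
            print(f"Task {task_ref} progress is {info.progress}%")
            time.sleep(POLL_INTERVAL)

The same loop shape explains why each task appears several times in the log before its "duration_secs" entry: every iteration of the poll emits one progress record until the task transitions to success or error.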
[ 1909.525748] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b6384175-97b6-4989-a8ca-70700cf53ddd tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "37c33e03-30c7-4cf4-99a1-360d892dde2d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.846s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.601171] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f889161c-28e9-4f5f-b5f1-db4c63cace50 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.623530] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d9fc96-91bc-43fc-9e38-f3b8a76a3ec8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.643176] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance '05f6f94a-c9c4-4737-8b07-77e9c2093497' progress to 67 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1909.792674] env[63371]: INFO nova.compute.claims [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1909.859628] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1909.859938] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.860173] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1909.860391] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.860574] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.863321] env[63371]: INFO nova.compute.manager [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Terminating instance [ 1909.865914] env[63371]: DEBUG nova.compute.manager [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1909.866162] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1909.867089] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d689d77a-f8d4-498e-a56f-941959ee2e69 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.875577] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1909.875864] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac7581f0-6ace-47d2-879d-23511061545f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.885051] env[63371]: DEBUG oslo_vmware.api [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1909.885051] env[63371]: value = "task-1775041" [ 1909.885051] env[63371]: _type = "Task" [ 1909.885051] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.893036] env[63371]: DEBUG oslo_vmware.api [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775041, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.010949] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.152998] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "37c33e03-30c7-4cf4-99a1-360d892dde2d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.153274] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "37c33e03-30c7-4cf4-99a1-360d892dde2d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.153480] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "37c33e03-30c7-4cf4-99a1-360d892dde2d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.153709] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "37c33e03-30c7-4cf4-99a1-360d892dde2d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.154055] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "37c33e03-30c7-4cf4-99a1-360d892dde2d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.155842] env[63371]: INFO nova.compute.manager [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Terminating instance [ 1910.157545] env[63371]: DEBUG nova.compute.manager [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1910.157737] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1910.158606] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f44d42-0451-47d0-b97c-19de51074ea3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.168327] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1910.168563] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0512c981-9b77-41a0-aa35-c4d1b842c2c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.174365] env[63371]: DEBUG oslo_vmware.api [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1910.174365] env[63371]: value = "task-1775042" [ 1910.174365] env[63371]: _type = "Task" [ 1910.174365] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.184097] env[63371]: DEBUG oslo_vmware.api [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775042, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.298686] env[63371]: INFO nova.compute.resource_tracker [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating resource usage from migration 699a4f1e-a25a-43be-bdeb-a7b17b4169f3 [ 1910.397913] env[63371]: DEBUG oslo_vmware.api [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775041, 'name': PowerOffVM_Task, 'duration_secs': 0.199844} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.398207] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1910.398374] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1910.398629] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-039a3981-4e7b-4470-aaed-b9b1a56046a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.428539] env[63371]: INFO nova.network.neutron [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Port 3a849b63-f519-49c8-92b3-ad93796fcc9b from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1910.428904] env[63371]: DEBUG nova.network.neutron [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updating instance_info_cache with network_info: [{"id": "2c2ab976-7609-4012-a826-68288c4f7f64", "address": "fa:16:3e:c1:5a:c9", "network": {"id": "78c77028-c23a-4160-8b08-d336e8101b3b", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-118331603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.187", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5a5897667b6b47deb7ff5b64f9499f36", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ba3bd22-c936-470e-89bd-b3a5587e87a0", "external-id": "nsx-vlan-transportzone-605", "segmentation_id": 605, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c2ab976-76", "ovs_interfaceid": "2c2ab976-7609-4012-a826-68288c4f7f64", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1910.494342] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d74d9ab-8b82-4539-830c-4860ca2eed12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.498132] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 
tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1910.498353] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1910.498533] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleting the datastore file [datastore1] e0fa0976-9a73-4b8b-b011-2e15199be5ff {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1910.499204] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac16abd9-b723-43a8-9269-74d8ccae9849 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.503946] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0edc3865-8ce2-4948-a8cc-1d609ee6b023 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.508037] env[63371]: DEBUG oslo_vmware.api [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1910.508037] env[63371]: value = "task-1775044" [ 1910.508037] env[63371]: _type = "Task" [ 1910.508037] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.537997] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8dbbb21-e4a2-4c4f-8675-3533009be27b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.543563] env[63371]: DEBUG oslo_vmware.api [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775044, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.548343] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b40ceb1b-d56a-432b-b7cd-36b99339cde5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.561592] env[63371]: DEBUG nova.compute.provider_tree [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1910.686827] env[63371]: DEBUG oslo_vmware.api [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775042, 'name': PowerOffVM_Task, 'duration_secs': 0.212935} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.687122] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1910.687297] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1910.687543] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3123fe0-add4-4f61-9740-92dd8c131f58 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.763397] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1910.763627] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1910.763889] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Deleting the datastore file [datastore1] 37c33e03-30c7-4cf4-99a1-360d892dde2d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1910.764292] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e511384b-02c0-4c9d-aacd-6907a2a994d7 {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.772290] env[63371]: DEBUG oslo_vmware.api [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for the task: (returnval){ [ 1910.772290] env[63371]: value = "task-1775046" [ 1910.772290] env[63371]: _type = "Task" [ 1910.772290] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.780031] env[63371]: DEBUG oslo_vmware.api [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775046, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.938437] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Releasing lock "refresh_cache-e0fa0976-9a73-4b8b-b011-2e15199be5ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1911.019251] env[63371]: DEBUG oslo_vmware.api [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195811} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.019533] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1911.019717] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1911.019894] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1911.020078] env[63371]: INFO nova.compute.manager [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1911.020330] env[63371]: DEBUG oslo.service.loopingcall [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1911.020519] env[63371]: DEBUG nova.compute.manager [-] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1911.020615] env[63371]: DEBUG nova.network.neutron [-] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1911.065607] env[63371]: DEBUG nova.scheduler.client.report [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1911.283168] env[63371]: DEBUG oslo_vmware.api [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Task: {'id': task-1775046, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152268} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.283918] env[63371]: DEBUG nova.network.neutron [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Port 5a88ea10-929b-41c9-b1b4-bf61377715c6 binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1911.285217] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1911.285405] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1911.285582] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1911.285750] env[63371]: INFO nova.compute.manager [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Took 1.13 seconds to destroy the instance 
on the hypervisor. [ 1911.285985] env[63371]: DEBUG oslo.service.loopingcall [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1911.286184] env[63371]: DEBUG nova.compute.manager [-] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1911.286274] env[63371]: DEBUG nova.network.neutron [-] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1911.446213] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9d53f6da-07c7-49ce-894a-c1ded321b0ed tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "interface-e0fa0976-9a73-4b8b-b011-2e15199be5ff-3a849b63-f519-49c8-92b3-ad93796fcc9b" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.006s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.545378] env[63371]: DEBUG nova.compute.manager [req-69f337c9-190d-4dec-a4ef-e901c476423a req-1923ecea-ff80-4cf8-9abf-f30f7d8a6489 service nova] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Received event network-vif-deleted-096fb9a4-ccdf-4930-a5cc-b5af3b649443 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1911.545378] env[63371]: INFO nova.compute.manager [req-69f337c9-190d-4dec-a4ef-e901c476423a req-1923ecea-ff80-4cf8-9abf-f30f7d8a6489 service nova] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Neutron deleted interface 096fb9a4-ccdf-4930-a5cc-b5af3b649443; detaching it from the instance and deleting it from the info cache [ 1911.545630] env[63371]: DEBUG nova.network.neutron [req-69f337c9-190d-4dec-a4ef-e901c476423a req-1923ecea-ff80-4cf8-9abf-f30f7d8a6489 service nova] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.572556] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.284s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.572556] env[63371]: INFO nova.compute.manager [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Migrating [ 1911.578236] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.568s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.579021] env[63371]: DEBUG nova.objects.instance [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lazy-loading 'resources' on Instance uuid 9885de9e-c640-4d82-a47a-980988d89deb {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1911.686077] env[63371]: DEBUG nova.compute.manager [req-10b8a478-081f-4bae-81e1-ac1ba3b9cae0 req-7cfa55e2-2c00-4a73-b36d-2b7c91cc0150 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Received event network-vif-deleted-2c2ab976-7609-4012-a826-68288c4f7f64 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1911.686312] env[63371]: INFO nova.compute.manager [req-10b8a478-081f-4bae-81e1-ac1ba3b9cae0 req-7cfa55e2-2c00-4a73-b36d-2b7c91cc0150 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Neutron deleted interface 2c2ab976-7609-4012-a826-68288c4f7f64; detaching it from the instance and deleting it from the info cache [ 1911.686487] env[63371]: DEBUG nova.network.neutron [req-10b8a478-081f-4bae-81e1-ac1ba3b9cae0 req-7cfa55e2-2c00-4a73-b36d-2b7c91cc0150 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1911.985013] env[63371]: DEBUG nova.network.neutron [-] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.043245] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Volume attach. 
Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1912.043593] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368510', 'volume_id': '0dab7eed-b384-4a7b-8d58-1cc47753a664', 'name': 'volume-0dab7eed-b384-4a7b-8d58-1cc47753a664', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cfa04c51-c077-4f16-ae57-e54d62aac044', 'attached_at': '', 'detached_at': '', 'volume_id': '0dab7eed-b384-4a7b-8d58-1cc47753a664', 'serial': '0dab7eed-b384-4a7b-8d58-1cc47753a664'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1912.044492] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd8ec2b-cd26-45bf-904c-d8222022f093 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.048994] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4500f016-df92-4d59-9d60-bcf98662050e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.062309] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a56a57-8399-4a39-ac35-21762e6b3e64 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.069180] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a91f542f-3513-4c0e-ade6-945f60560116 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.102080] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] volume-0dab7eed-b384-4a7b-8d58-1cc47753a664/volume-0dab7eed-b384-4a7b-8d58-1cc47753a664.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1912.103548] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.103705] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.103879] env[63371]: DEBUG nova.network.neutron [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 
88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1912.105104] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-110cb50b-ff0b-4751-8288-004eb520a5f4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.130885] env[63371]: DEBUG nova.compute.manager [req-69f337c9-190d-4dec-a4ef-e901c476423a req-1923ecea-ff80-4cf8-9abf-f30f7d8a6489 service nova] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Detach interface failed, port_id=096fb9a4-ccdf-4930-a5cc-b5af3b649443, reason: Instance 37c33e03-30c7-4cf4-99a1-360d892dde2d could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1912.138162] env[63371]: DEBUG oslo_vmware.api [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1912.138162] env[63371]: value = "task-1775048" [ 1912.138162] env[63371]: _type = "Task" [ 1912.138162] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.146630] env[63371]: DEBUG oslo_vmware.api [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1775048, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.167043] env[63371]: DEBUG nova.network.neutron [-] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.189163] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c17ee6a-a454-4e05-bad1-e948d8c491a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.200042] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ea9f6d-360c-4aa8-8e86-c0dbe74a42dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.212391] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1912.212771] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1912.234447] env[63371]: DEBUG nova.compute.manager [req-10b8a478-081f-4bae-81e1-ac1ba3b9cae0 req-7cfa55e2-2c00-4a73-b36d-2b7c91cc0150 service nova] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Detach interface failed, port_id=2c2ab976-7609-4012-a826-68288c4f7f64, reason: Instance e0fa0976-9a73-4b8b-b011-2e15199be5ff could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1912.305506] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62cf9041-f3ba-464e-8bdc-baaffe66c3c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.315131] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "05f6f94a-c9c4-4737-8b07-77e9c2093497-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.315422] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.315601] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1912.322804] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5037bbd9-c788-4810-ac01-c9ae0b045b80 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.358664] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e17fc21-e1a7-4d57-8f6e-fbb0bb076187 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.367400] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d744975f-bda2-4124-b0e8-24beabf3ceee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.381803] env[63371]: DEBUG nova.compute.provider_tree [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1912.394964] env[63371]: DEBUG nova.network.neutron [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [{"id": "01b878e5-651e-49f1-959f-7da17291c0bc", "address": "fa:16:3e:b7:c4:0c", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b878e5-65", "ovs_interfaceid": "01b878e5-651e-49f1-959f-7da17291c0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.488072] env[63371]: INFO nova.compute.manager [-] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Took 1.20 seconds to deallocate network for instance. [ 1912.647961] env[63371]: DEBUG oslo_vmware.api [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1775048, 'name': ReconfigVM_Task, 'duration_secs': 0.457506} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.648283] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Reconfigured VM instance instance-00000073 to attach disk [datastore1] volume-0dab7eed-b384-4a7b-8d58-1cc47753a664/volume-0dab7eed-b384-4a7b-8d58-1cc47753a664.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1912.653394] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e342c371-9f2b-44e7-b03a-8b5a6d9d40d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.668510] env[63371]: INFO nova.compute.manager [-] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Took 1.65 seconds to deallocate network for instance. [ 1912.668852] env[63371]: DEBUG oslo_vmware.api [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1912.668852] env[63371]: value = "task-1775049" [ 1912.668852] env[63371]: _type = "Task" [ 1912.668852] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.680521] env[63371]: DEBUG oslo_vmware.api [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1775049, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.723032] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1912.723032] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1912.723245] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 1912.885064] env[63371]: DEBUG nova.scheduler.client.report [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1912.897658] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1912.994755] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.180080] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.180345] env[63371]: DEBUG oslo_vmware.api [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1775049, 'name': ReconfigVM_Task, 'duration_secs': 0.13083} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.180623] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368510', 'volume_id': '0dab7eed-b384-4a7b-8d58-1cc47753a664', 'name': 'volume-0dab7eed-b384-4a7b-8d58-1cc47753a664', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cfa04c51-c077-4f16-ae57-e54d62aac044', 'attached_at': '', 'detached_at': '', 'volume_id': '0dab7eed-b384-4a7b-8d58-1cc47753a664', 'serial': '0dab7eed-b384-4a7b-8d58-1cc47753a664'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1913.227241] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.227386] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquired lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.227515] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Forcefully refreshing network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1913.227657] env[63371]: DEBUG nova.objects.instance [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lazy-loading 'info_cache' on Instance uuid 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1913.347871] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1913.348088] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1913.348269] env[63371]: DEBUG nova.network.neutron [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1913.390624] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 
1.812s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1913.392776] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.398s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1913.393008] env[63371]: DEBUG nova.objects.instance [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lazy-loading 'resources' on Instance uuid 37c33e03-30c7-4cf4-99a1-360d892dde2d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1913.414355] env[63371]: INFO nova.scheduler.client.report [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleted allocations for instance 9885de9e-c640-4d82-a47a-980988d89deb [ 1913.921672] env[63371]: DEBUG oslo_concurrency.lockutils [None req-3a61a8aa-efac-4c8c-9862-7f2a9a589aa1 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "9885de9e-c640-4d82-a47a-980988d89deb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.075s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.078744] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046c1acd-7ab6-4130-a3a6-950393f75641 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.088948] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c036a327-0fb0-467d-95a5-3d22c00ed15b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.119253] env[63371]: DEBUG nova.network.neutron [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance_info_cache with network_info: [{"id": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "address": "fa:16:3e:b6:f8:d2", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tap5a88ea10-92", "ovs_interfaceid": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.121105] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b68957f-5adf-49c2-ba05-91d5c4df18f8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.128873] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72a62ae0-da13-43ae-8d9f-ec01329589fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.145327] env[63371]: DEBUG nova.compute.provider_tree [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1914.214981] env[63371]: DEBUG nova.objects.instance [None req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lazy-loading 'flavor' on Instance uuid cfa04c51-c077-4f16-ae57-e54d62aac044 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1914.411466] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4835f9b-9dde-41b8-83b0-6325cc422cda {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.431781] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance '88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec' progress to 0 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1914.625557] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.648414] env[63371]: DEBUG nova.scheduler.client.report [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1914.719534] env[63371]: DEBUG oslo_concurrency.lockutils [None 
req-6c253cbb-1bc6-48ae-b3db-024cc49e63ae tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.294s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.855493] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "cfa04c51-c077-4f16-ae57-e54d62aac044" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.855896] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.937474] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1914.937864] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de76d5f4-7f44-469d-a33a-8c8331c869a3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.945697] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1914.945697] env[63371]: value = "task-1775050" [ 1914.945697] env[63371]: _type = "Task" [ 1914.945697] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1914.954420] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775050, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.013743] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [{"id": "01b878e5-651e-49f1-959f-7da17291c0bc", "address": "fa:16:3e:b7:c4:0c", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b878e5-65", "ovs_interfaceid": "01b878e5-651e-49f1-959f-7da17291c0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1915.134300] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c111c1-45a2-46d1-bb4c-332c75befb6c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.143146] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fae0b4-8eb2-45b9-94d6-1f0f7c009923 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.152956] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.760s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.159958] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.979s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.159958] env[63371]: DEBUG nova.objects.instance [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'resources' on Instance uuid e0fa0976-9a73-4b8b-b011-2e15199be5ff {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1915.180784] env[63371]: INFO nova.scheduler.client.report [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 
tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Deleted allocations for instance 37c33e03-30c7-4cf4-99a1-360d892dde2d [ 1915.359674] env[63371]: INFO nova.compute.manager [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Detaching volume 0dab7eed-b384-4a7b-8d58-1cc47753a664 [ 1915.396752] env[63371]: INFO nova.virt.block_device [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Attempting to driver detach volume 0dab7eed-b384-4a7b-8d58-1cc47753a664 from mountpoint /dev/sdb [ 1915.397007] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Volume detach. Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1915.397205] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368510', 'volume_id': '0dab7eed-b384-4a7b-8d58-1cc47753a664', 'name': 'volume-0dab7eed-b384-4a7b-8d58-1cc47753a664', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cfa04c51-c077-4f16-ae57-e54d62aac044', 'attached_at': '', 'detached_at': '', 'volume_id': '0dab7eed-b384-4a7b-8d58-1cc47753a664', 'serial': '0dab7eed-b384-4a7b-8d58-1cc47753a664'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1915.398372] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23a528c-5284-4dd1-9bb6-de54f67fff7d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.420768] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d6e2ac-20ba-40f7-8119-cc1c066daf50 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.427733] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c023c927-64b2-481a-864c-e366a246bbf6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.451323] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0b3b2c-2b55-42c5-b5f6-048eff528699 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.458837] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775050, 'name': PowerOffVM_Task, 'duration_secs': 0.278017} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.469081] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1915.469304] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance '88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec' progress to 17 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1915.475172] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] The volume has not been displaced from its original location: [datastore1] volume-0dab7eed-b384-4a7b-8d58-1cc47753a664/volume-0dab7eed-b384-4a7b-8d58-1cc47753a664.vmdk. No consolidation needed. {{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1915.478575] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Reconfiguring VM instance instance-00000073 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1915.478898] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e45e35c-bbfd-44d0-9b5a-c86dc24d6714 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.497677] env[63371]: DEBUG oslo_vmware.api [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1915.497677] env[63371]: value = "task-1775051" [ 1915.497677] env[63371]: _type = "Task" [ 1915.497677] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.505685] env[63371]: DEBUG oslo_vmware.api [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1775051, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.516608] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Releasing lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.516793] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updated the network info_cache for instance {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1915.516976] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1915.517151] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1915.517582] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1915.517747] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1915.518271] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1915.518271] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1915.518271] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1915.518413] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1915.692092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9eba90f4-1ae2-4309-8d11-e53dfa20f584 tempest-ImagesOneServerNegativeTestJSON-508478447 tempest-ImagesOneServerNegativeTestJSON-508478447-project-member] Lock "37c33e03-30c7-4cf4-99a1-360d892dde2d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.538s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.836566] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72bbc73-d70b-4074-932f-9b5699f7fee8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.844180] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30a54bf-c221-4ace-80cf-c14db8f8b75d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.874402] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f64dc57-5b34-4810-a2e8-7c8428de38a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.883665] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d2a410-e981-454b-bae4-71bceb32fd6f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.898145] env[63371]: DEBUG nova.compute.provider_tree [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1915.964109] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "29791f6c-edec-44b3-828b-0e306d167c42" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.965060] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "29791f6c-edec-44b3-828b-0e306d167c42" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.981849] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-12-11T21:16:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1915.982723] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1915.982883] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1915.984390] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1915.984390] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1915.984390] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1915.984390] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1915.984390] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1915.984790] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1915.984790] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1915.985293] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1915.991276] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53caf75c-aef0-4a5f-9492-58bac46dd422 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.012201] env[63371]: DEBUG oslo_vmware.api [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1775051, 'name': ReconfigVM_Task, 'duration_secs': 0.234926} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.013423] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Reconfigured VM instance instance-00000073 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1916.018126] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1916.018126] env[63371]: value = "task-1775052" [ 1916.018126] env[63371]: _type = "Task" [ 1916.018126] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.018425] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-156ecfcc-e854-4d26-9f00-bee4360ba79b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.029643] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.039182] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775052, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.040747] env[63371]: DEBUG oslo_vmware.api [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1916.040747] env[63371]: value = "task-1775053" [ 1916.040747] env[63371]: _type = "Task" [ 1916.040747] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.048247] env[63371]: DEBUG oslo_vmware.api [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1775053, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.265814] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da7a0b32-603d-413f-9bc5-34d22f801f51 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.297042] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da4b654-bb8a-4d1c-9fd3-33cecbf0c4f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.304367] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance '05f6f94a-c9c4-4737-8b07-77e9c2093497' progress to 83 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1916.401891] env[63371]: DEBUG nova.scheduler.client.report [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1916.468546] env[63371]: DEBUG nova.compute.manager [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1916.538543] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775052, 'name': ReconfigVM_Task, 'duration_secs': 0.153401} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.538856] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance '88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec' progress to 33 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1916.553983] env[63371]: DEBUG oslo_vmware.api [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1775053, 'name': ReconfigVM_Task, 'duration_secs': 0.15393} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.554297] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368510', 'volume_id': '0dab7eed-b384-4a7b-8d58-1cc47753a664', 'name': 'volume-0dab7eed-b384-4a7b-8d58-1cc47753a664', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'cfa04c51-c077-4f16-ae57-e54d62aac044', 'attached_at': '', 'detached_at': '', 'volume_id': '0dab7eed-b384-4a7b-8d58-1cc47753a664', 'serial': '0dab7eed-b384-4a7b-8d58-1cc47753a664'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1916.810961] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1916.811364] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78a3272d-e512-4332-9de1-ea399cff7b49 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.818779] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1916.818779] env[63371]: value = "task-1775054" [ 1916.818779] env[63371]: _type = "Task" [ 1916.818779] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.826732] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775054, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.912842] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.754s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.916073] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.886s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.916290] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.916453] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1916.917457] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd1623b-2cae-4fdb-b391-47d40f153d9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.926547] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a651ff6-2ef8-468a-98c9-0dda2129ef71 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.943436] env[63371]: INFO nova.scheduler.client.report [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleted allocations for instance e0fa0976-9a73-4b8b-b011-2e15199be5ff [ 1916.944779] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9428976d-b488-436a-8ab3-4616538a2fc9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.953904] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced3924f-071a-434c-adc3-f4f03c4afabd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.985670] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178797MB free_disk=164GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1916.986177] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.986177] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.001781] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1917.052051] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1917.052051] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1917.052051] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1917.052051] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1917.052051] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1917.052595] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1917.052959] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1917.054019] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1917.054019] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1917.054019] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1917.054019] env[63371]: DEBUG nova.virt.hardware [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1917.059233] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Reconfiguring VM instance instance-00000032 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1917.060093] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1549266-f93f-453c-ad89-8292aab0c49b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.078195] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1917.078195] env[63371]: value = "task-1775055" [ 1917.078195] env[63371]: _type = "Task" [ 1917.078195] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.086622] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775055, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.106018] env[63371]: DEBUG nova.objects.instance [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lazy-loading 'flavor' on Instance uuid cfa04c51-c077-4f16-ae57-e54d62aac044 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1917.328304] env[63371]: DEBUG oslo_vmware.api [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775054, 'name': PowerOnVM_Task, 'duration_secs': 0.402371} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.328661] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1917.328918] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4f5491db-ddbd-4298-8d56-6853e24befa6 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance '05f6f94a-c9c4-4737-8b07-77e9c2093497' progress to 100 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1917.452998] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e8e0b634-058e-49c0-b283-8b27e38bb770 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "e0fa0976-9a73-4b8b-b011-2e15199be5ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.593s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.588084] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775055, 'name': ReconfigVM_Task, 'duration_secs': 0.190555} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.588383] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Reconfigured VM instance instance-00000032 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1917.589190] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2282902-865a-43f1-b0b9-f75bd3c72d74 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.612342] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec/88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1917.614371] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-321e08db-cca8-4013-86e2-c156ce823a6f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.634523] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1917.634523] env[63371]: value = "task-1775056" [ 1917.634523] env[63371]: _type = "Task" [ 1917.634523] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.644649] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775056, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.998063] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Applying migration context for instance 05f6f94a-c9c4-4737-8b07-77e9c2093497 as it has an incoming, in-progress migration 14f006ac-29e5-4cd6-9ce5-48006f177492. Migration status is post-migrating {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1917.998356] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Applying migration context for instance 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec as it has an incoming, in-progress migration 699a4f1e-a25a-43be-bdeb-a7b17b4169f3. 
Migration status is migrating {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1917.999561] env[63371]: INFO nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating resource usage from migration 14f006ac-29e5-4cd6-9ce5-48006f177492 [ 1917.999885] env[63371]: INFO nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating resource usage from migration 699a4f1e-a25a-43be-bdeb-a7b17b4169f3 [ 1918.023489] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 6df9af10-0053-4696-920a-10ab2af67ef5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.023489] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 0518c5a8-8cc1-4829-a0cf-5f5904f6df86 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.023489] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 1ec21edd-7b7c-4a2b-983f-8aa6c022e033 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.023489] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.023489] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance ff724a9f-5e9a-4683-8eb3-058fb3639ea5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.023489] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance cfa04c51-c077-4f16-ae57-e54d62aac044 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.023489] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Migration 14f006ac-29e5-4cd6-9ce5-48006f177492 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1918.023489] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 05f6f94a-c9c4-4737-8b07-77e9c2093497 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.023489] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Migration 699a4f1e-a25a-43be-bdeb-a7b17b4169f3 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1918.023489] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1918.130754] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ba1c5e7d-a005-4b8c-ba55-366bd6cab2a3 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.275s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.148171] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775056, 'name': ReconfigVM_Task, 'duration_secs': 0.479108} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.148171] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec/88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1918.149087] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance '88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec' progress to 50 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1918.526280] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 29791f6c-edec-44b3-828b-0e306d167c42 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1918.526560] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1918.526708] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2560MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1918.604265] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.604265] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.604265] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.604265] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.604554] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.608963] env[63371]: INFO nova.compute.manager [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Terminating instance [ 1918.611823] env[63371]: DEBUG nova.compute.manager [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 
tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1918.611823] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1918.612712] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14bbbc06-4344-4523-8ae9-2c329a1b25c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.623136] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1918.623404] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e612a6d8-255c-408d-9f84-188d15c47f81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.630693] env[63371]: DEBUG oslo_vmware.api [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1918.630693] env[63371]: value = "task-1775057" [ 1918.630693] env[63371]: _type = "Task" [ 1918.630693] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.642165] env[63371]: DEBUG oslo_vmware.api [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775057, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.656162] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306ee99f-0f16-4fe0-a5ac-74d5b8b0a9b9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.681091] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "cfa04c51-c077-4f16-ae57-e54d62aac044" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.681357] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.681555] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "cfa04c51-c077-4f16-ae57-e54d62aac044-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.681736] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.681906] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.686138] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93734d4-011d-46ff-9333-1bf868307220 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.690046] env[63371]: INFO nova.compute.manager [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Terminating instance [ 1918.692866] env[63371]: DEBUG nova.compute.manager [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Start destroying the instance on the 
hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1918.693161] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1918.694631] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63e43b23-ed4b-45c4-ac3f-e1c94e3ae4ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.713315] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance '88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec' progress to 67 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1918.720980] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2830d3bb-b6e9-49ce-9281-f69a759cbc0c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.724103] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1918.724764] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3621899f-df34-435c-a5dd-36cf906b5af9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.729674] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e5470a-aab0-4471-9d97-44273a07d360 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.734169] env[63371]: DEBUG oslo_vmware.api [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1918.734169] env[63371]: value = "task-1775058" [ 1918.734169] env[63371]: _type = "Task" [ 1918.734169] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.766890] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a097b9e-53b1-4e3a-b85b-c89b9293984a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.773232] env[63371]: DEBUG oslo_vmware.api [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1775058, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.778719] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c45e83-eb64-474e-9373-4622bc099f4d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.795017] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1919.144188] env[63371]: DEBUG oslo_vmware.api [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775057, 'name': PowerOffVM_Task, 'duration_secs': 0.192334} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.144700] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1919.144700] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1919.144963] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6471d397-4085-4cf4-9993-9173010d9fe5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.211240] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1919.211454] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1919.211623] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleting the datastore file [datastore1] 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1919.211883] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3b5a86b-fc99-4dc0-bc56-0266f915ffe4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.218284] env[63371]: DEBUG oslo_vmware.api [None req-7ed16062-82a2-469e-8684-2a5083784966 
tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for the task: (returnval){ [ 1919.218284] env[63371]: value = "task-1775060" [ 1919.218284] env[63371]: _type = "Task" [ 1919.218284] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.229742] env[63371]: DEBUG oslo_vmware.api [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775060, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.243256] env[63371]: DEBUG oslo_vmware.api [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1775058, 'name': PowerOffVM_Task, 'duration_secs': 0.205068} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.243256] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1919.243256] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1919.243627] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b19d20b4-607b-431d-a73a-9a7480f659ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.264612] env[63371]: DEBUG nova.network.neutron [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Port 01b878e5-651e-49f1-959f-7da17291c0bc binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1919.301845] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1919.317108] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Unregistered the VM {{(pid=63371) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1919.317108] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1919.317108] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Deleting the datastore file [datastore1] cfa04c51-c077-4f16-ae57-e54d62aac044 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1919.317519] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cbb20316-4096-47c4-aaab-99447c8f9b9e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.323889] env[63371]: DEBUG oslo_vmware.api [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for the task: (returnval){ [ 1919.323889] env[63371]: value = "task-1775062" [ 1919.323889] env[63371]: _type = "Task" [ 1919.323889] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.332386] env[63371]: DEBUG oslo_vmware.api [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1775062, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.401949] env[63371]: DEBUG nova.network.neutron [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Port 5a88ea10-929b-41c9-b1b4-bf61377715c6 binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1919.402223] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.402366] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.402524] env[63371]: DEBUG nova.network.neutron [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1919.729065] env[63371]: DEBUG oslo_vmware.api [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Task: {'id': task-1775060, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197903} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.729357] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1919.729571] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1919.729772] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1919.729961] env[63371]: INFO nova.compute.manager [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1919.730249] env[63371]: DEBUG oslo.service.loopingcall [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1919.730466] env[63371]: DEBUG nova.compute.manager [-] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1919.730571] env[63371]: DEBUG nova.network.neutron [-] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1919.807939] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1919.808228] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.822s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.808598] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.807s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.810316] env[63371]: INFO nova.compute.claims [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1919.834806] env[63371]: DEBUG oslo_vmware.api [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Task: {'id': task-1775062, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.153668} completed successfully.
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.835810] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1919.836018] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1919.836218] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1919.836395] env[63371]: INFO nova.compute.manager [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1919.836649] env[63371]: DEBUG oslo.service.loopingcall [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1919.836867] env[63371]: DEBUG nova.compute.manager [-] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1919.836974] env[63371]: DEBUG nova.network.neutron [-] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1920.143743] env[63371]: DEBUG nova.compute.manager [req-54019b3b-5be1-4af9-97b5-6a7c9c2447ac req-07ab3f19-e93a-4e4a-8497-f98de170c9f8 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Received event network-vif-deleted-39fe8c75-7aaa-42da-a231-9c68310ef7c8 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1920.143961] env[63371]: INFO nova.compute.manager [req-54019b3b-5be1-4af9-97b5-6a7c9c2447ac req-07ab3f19-e93a-4e4a-8497-f98de170c9f8 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Neutron deleted interface 39fe8c75-7aaa-42da-a231-9c68310ef7c8; detaching it from the instance and deleting it from the info cache [ 1920.144167] env[63371]: DEBUG nova.network.neutron [req-54019b3b-5be1-4af9-97b5-6a7c9c2447ac req-07ab3f19-e93a-4e4a-8497-f98de170c9f8 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.290312] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.293463] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.294011] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.003s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.503197] env[63371]: DEBUG nova.network.neutron [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance_info_cache with network_info: [{"id": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "address": "fa:16:3e:b6:f8:d2", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway":
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a88ea10-92", "ovs_interfaceid": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.533461] env[63371]: DEBUG nova.network.neutron [-] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.647242] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd6d3ec9-e5d3-4aa9-9d23-474a3ff4701e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.664805] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66ab405-c5f6-4b7e-b40d-8c2b4cfde195 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.701787] env[63371]: DEBUG nova.compute.manager [req-54019b3b-5be1-4af9-97b5-6a7c9c2447ac req-07ab3f19-e93a-4e4a-8497-f98de170c9f8 service nova] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Detach interface failed, port_id=39fe8c75-7aaa-42da-a231-9c68310ef7c8, reason: Instance 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce could not be found. 
{{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1920.991512] env[63371]: DEBUG nova.compute.manager [req-3d7ffa0b-4221-4f70-9761-241197c81ac4 req-be6912ed-5e60-434f-be50-ac6bfb50adeb service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Received event network-vif-deleted-3856f78a-36e6-49ce-8a81-1e94a9c8f1cc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1920.991749] env[63371]: INFO nova.compute.manager [req-3d7ffa0b-4221-4f70-9761-241197c81ac4 req-be6912ed-5e60-434f-be50-ac6bfb50adeb service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Neutron deleted interface 3856f78a-36e6-49ce-8a81-1e94a9c8f1cc; detaching it from the instance and deleting it from the info cache [ 1920.991942] env[63371]: DEBUG nova.network.neutron [req-3d7ffa0b-4221-4f70-9761-241197c81ac4 req-be6912ed-5e60-434f-be50-ac6bfb50adeb service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1921.000181] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91141d99-af0d-4fd7-bac8-b635d6087ebd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.005402] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1921.011688] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d189b91-381e-4b06-a0f5-5282c707b550 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.045677] env[63371]: INFO nova.compute.manager [-] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Took 1.31 seconds to deallocate network for instance. 
[ 1921.047460] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d841a1-938b-4b16-82ad-844b3ace4f01 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.061784] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9a38b6-c55a-4e35-ba69-e5c09220d890 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.076610] env[63371]: DEBUG nova.compute.provider_tree [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1921.366252] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.366675] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.366675] env[63371]: DEBUG nova.network.neutron [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1921.460380] env[63371]: DEBUG nova.network.neutron [-] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1921.504949] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94694eab-abde-4637-b1f4-97e636ceb2ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.511144] env[63371]: DEBUG nova.compute.manager [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63371) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 1921.517960] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7be11f-df78-4a27-99fc-4cc8e71836f0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.552558] env[63371]: DEBUG nova.compute.manager [req-3d7ffa0b-4221-4f70-9761-241197c81ac4 req-be6912ed-5e60-434f-be50-ac6bfb50adeb service nova] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Detach interface failed, port_id=3856f78a-36e6-49ce-8a81-1e94a9c8f1cc, 
reason: Instance cfa04c51-c077-4f16-ae57-e54d62aac044 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1921.558109] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.583038] env[63371]: DEBUG nova.scheduler.client.report [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1921.963427] env[63371]: INFO nova.compute.manager [-] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Took 2.13 seconds to deallocate network for instance. [ 1922.090815] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.282s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.091554] env[63371]: DEBUG nova.compute.manager [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1922.094566] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.537s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.094802] env[63371]: DEBUG nova.objects.instance [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lazy-loading 'resources' on Instance uuid 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1922.248530] env[63371]: DEBUG nova.network.neutron [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [{"id": "01b878e5-651e-49f1-959f-7da17291c0bc", "address": "fa:16:3e:b7:c4:0c", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b878e5-65", "ovs_interfaceid": "01b878e5-651e-49f1-959f-7da17291c0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.474222] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.597364] env[63371]: DEBUG nova.compute.utils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1922.598823] env[63371]: DEBUG nova.compute.manager [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1922.599060] env[63371]: DEBUG nova.network.neutron [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1922.648767] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.669449] env[63371]: DEBUG nova.policy [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ef97c1a9a174c1888972e6f281eecbe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2601d597b4d64481ace490d56d1056a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1922.751468] env[63371]: DEBUG oslo_concurrency.lockutils [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1922.757271] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bfc7341-7971-4375-a7d3-5c47c3cd33ff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.764803] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea089608-6168-4af8-bb51-ffd9ed9b140e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.796490] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce64df4a-637a-44ea-9a62-a475cad0b603 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.805085] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c420902f-144d-4a47-bd40-f66e6e6575fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.818959] env[63371]: DEBUG nova.compute.provider_tree [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1923.104967] env[63371]: DEBUG nova.network.neutron [None 
req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Successfully created port: 34275fec-e3cb-4276-9619-f3498ff59a3a {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1923.107375] env[63371]: DEBUG nova.compute.manager [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1923.277731] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d38e1b28-63ee-488f-b663-2279dc68fad8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.298394] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae83f08f-5084-4e29-a543-06a026bfe746 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.305436] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance '88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec' progress to 83 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1923.325303] env[63371]: DEBUG nova.scheduler.client.report [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1923.811848] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1923.812234] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-deec1862-955a-476b-bf56-8df67c947f2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.820343] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1923.820343] env[63371]: value = "task-1775063" [ 1923.820343] env[63371]: _type = "Task" [ 1923.820343] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.828632] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775063, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.830381] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.736s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1923.832551] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.358s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1923.832782] env[63371]: DEBUG nova.objects.instance [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lazy-loading 'resources' on Instance uuid cfa04c51-c077-4f16-ae57-e54d62aac044 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1923.850399] env[63371]: INFO nova.scheduler.client.report [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Deleted allocations for instance 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce [ 1924.117072] env[63371]: DEBUG nova.compute.manager [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1924.139525] env[63371]: DEBUG nova.virt.hardware [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1924.139960] env[63371]: DEBUG nova.virt.hardware [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1924.139960] env[63371]: DEBUG nova.virt.hardware [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1924.140109] env[63371]: DEBUG nova.virt.hardware [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1924.140264] env[63371]: DEBUG nova.virt.hardware [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1924.140399] env[63371]: DEBUG nova.virt.hardware [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1924.140605] env[63371]: DEBUG nova.virt.hardware [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1924.140761] env[63371]: DEBUG nova.virt.hardware [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1924.140927] env[63371]: DEBUG 
nova.virt.hardware [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1924.141198] env[63371]: DEBUG nova.virt.hardware [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1924.141330] env[63371]: DEBUG nova.virt.hardware [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1924.142200] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ea81394-fbe2-4be3-a327-3faa96ee3aa3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.150094] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a67a0e1-da1f-4289-8b60-2340b0bd27d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.329995] env[63371]: DEBUG oslo_vmware.api [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775063, 'name': PowerOnVM_Task, 'duration_secs': 0.430555} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.330286] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1924.330473] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-50cff10a-886f-4a1a-b72a-3057068505cd tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance '88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec' progress to 100 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1924.360475] env[63371]: DEBUG oslo_concurrency.lockutils [None req-7ed16062-82a2-469e-8684-2a5083784966 tempest-AttachInterfacesTestJSON-796074902 tempest-AttachInterfacesTestJSON-796074902-project-member] Lock "3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.756s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.467187] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5358e17d-d247-40e7-b057-e31708a8b50c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.474865] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64b1dc6-9f24-4522-aaf3-fc7f1052e430 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.504990] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197b5b17-61f9-4bdb-8398-e88f328801c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.512761] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5486074b-3088-4c48-86da-d095651e33bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.527156] env[63371]: DEBUG nova.compute.provider_tree [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1924.546399] env[63371]: DEBUG nova.compute.manager [req-bb422527-f2f4-4449-a2fa-b392b46b3dd7 req-40eec8f7-99ed-48c7-ac4a-6129361373bf service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Received event network-vif-plugged-34275fec-e3cb-4276-9619-f3498ff59a3a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1924.546399] env[63371]: DEBUG oslo_concurrency.lockutils [req-bb422527-f2f4-4449-a2fa-b392b46b3dd7 req-40eec8f7-99ed-48c7-ac4a-6129361373bf service nova] Acquiring lock "29791f6c-edec-44b3-828b-0e306d167c42-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63371) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1924.546399] env[63371]: DEBUG oslo_concurrency.lockutils [req-bb422527-f2f4-4449-a2fa-b392b46b3dd7 req-40eec8f7-99ed-48c7-ac4a-6129361373bf service nova] Lock "29791f6c-edec-44b3-828b-0e306d167c42-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.546399] env[63371]: DEBUG oslo_concurrency.lockutils [req-bb422527-f2f4-4449-a2fa-b392b46b3dd7 req-40eec8f7-99ed-48c7-ac4a-6129361373bf service nova] Lock "29791f6c-edec-44b3-828b-0e306d167c42-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.546399] env[63371]: DEBUG nova.compute.manager [req-bb422527-f2f4-4449-a2fa-b392b46b3dd7 req-40eec8f7-99ed-48c7-ac4a-6129361373bf service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] No waiting events found dispatching network-vif-plugged-34275fec-e3cb-4276-9619-f3498ff59a3a {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1924.546399] env[63371]: WARNING nova.compute.manager [req-bb422527-f2f4-4449-a2fa-b392b46b3dd7 req-40eec8f7-99ed-48c7-ac4a-6129361373bf service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Received unexpected event network-vif-plugged-34275fec-e3cb-4276-9619-f3498ff59a3a for instance with vm_state building and task_state spawning. [ 1924.649993] env[63371]: DEBUG nova.network.neutron [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Successfully updated port: 34275fec-e3cb-4276-9619-f3498ff59a3a {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1925.030827] env[63371]: DEBUG nova.scheduler.client.report [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1925.153210] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "refresh_cache-29791f6c-edec-44b3-828b-0e306d167c42" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.154920] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "refresh_cache-29791f6c-edec-44b3-828b-0e306d167c42" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.154920]
env[63371]: DEBUG nova.network.neutron [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1925.535059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.702s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.537425] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 2.889s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.562380] env[63371]: INFO nova.scheduler.client.report [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Deleted allocations for instance cfa04c51-c077-4f16-ae57-e54d62aac044 [ 1925.702623] env[63371]: DEBUG nova.network.neutron [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1925.973365] env[63371]: DEBUG nova.network.neutron [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Updating instance_info_cache with network_info: [{"id": "34275fec-e3cb-4276-9619-f3498ff59a3a", "address": "fa:16:3e:ad:41:b5", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34275fec-e3", "ovs_interfaceid": "34275fec-e3cb-4276-9619-f3498ff59a3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1926.040959] env[63371]: DEBUG nova.objects.instance [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'migration_context' on Instance uuid 05f6f94a-c9c4-4737-8b07-77e9c2093497 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1926.071705] env[63371]: DEBUG oslo_concurrency.lockutils [None req-aaf64139-cd7e-4e91-bec8-3745ac58f565 tempest-AttachVolumeNegativeTest-1836472214 tempest-AttachVolumeNegativeTest-1836472214-project-member] Lock "cfa04c51-c077-4f16-ae57-e54d62aac044" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.390s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.476221] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "refresh_cache-29791f6c-edec-44b3-828b-0e306d167c42" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.476570] env[63371]: DEBUG nova.compute.manager [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Instance network_info: |[{"id": "34275fec-e3cb-4276-9619-f3498ff59a3a", "address": "fa:16:3e:ad:41:b5", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9",
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34275fec-e3", "ovs_interfaceid": "34275fec-e3cb-4276-9619-f3498ff59a3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1926.477012] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:41:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6fb0104-186b-4288-b87e-634893f46f01', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '34275fec-e3cb-4276-9619-f3498ff59a3a', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1926.484757] env[63371]: DEBUG oslo.service.loopingcall [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1926.484959] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1926.485283] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-30f80ac2-004c-420f-a3d7-360ee16287a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.500463] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.501222] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" acquired by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.501222] env[63371]: DEBUG nova.compute.manager [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Going to confirm migration 8 {{(pid=63371) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1926.507891] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1926.507891] env[63371]: value = "task-1775064" [ 1926.507891] env[63371]: _type = "Task" [ 1926.507891] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.516250] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775064, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.580173] env[63371]: DEBUG nova.compute.manager [req-ac5b9a19-8952-40d8-ad35-9e2244a47f94 req-81c9664b-84ff-4dbf-8968-dc647474c166 service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Received event network-changed-34275fec-e3cb-4276-9619-f3498ff59a3a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1926.580462] env[63371]: DEBUG nova.compute.manager [req-ac5b9a19-8952-40d8-ad35-9e2244a47f94 req-81c9664b-84ff-4dbf-8968-dc647474c166 service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Refreshing instance network info cache due to event network-changed-34275fec-e3cb-4276-9619-f3498ff59a3a.
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1926.580689] env[63371]: DEBUG oslo_concurrency.lockutils [req-ac5b9a19-8952-40d8-ad35-9e2244a47f94 req-81c9664b-84ff-4dbf-8968-dc647474c166 service nova] Acquiring lock "refresh_cache-29791f6c-edec-44b3-828b-0e306d167c42" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1926.580833] env[63371]: DEBUG oslo_concurrency.lockutils [req-ac5b9a19-8952-40d8-ad35-9e2244a47f94 req-81c9664b-84ff-4dbf-8968-dc647474c166 service nova] Acquired lock "refresh_cache-29791f6c-edec-44b3-828b-0e306d167c42" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.580996] env[63371]: DEBUG nova.network.neutron [req-ac5b9a19-8952-40d8-ad35-9e2244a47f94 req-81c9664b-84ff-4dbf-8968-dc647474c166 service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Refreshing network info cache for port 34275fec-e3cb-4276-9619-f3498ff59a3a {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1926.718753] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06724426-afa3-4afe-a0e8-4c57ec6b855f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.728788] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07dab46c-6919-428b-ab33-304792950f28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.764019] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f32169-9b18-420e-b055-96706893fdd1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.771322] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ddcf78-e96b-45d4-b866-6f24551b65fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.785372] env[63371]: DEBUG nova.compute.provider_tree [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1927.017617] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775064, 'name': CreateVM_Task, 'duration_secs': 0.319734} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.017800] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1927.026575] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.026575] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.026575] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1927.026575] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0e2a235-8db3-499d-a9c6-b8fec26d698f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.031095] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1927.031095] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520a19ec-2527-92ec-4066-8e37e387bfec" [ 1927.031095] env[63371]: _type = "Task" [ 1927.031095] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.038930] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520a19ec-2527-92ec-4066-8e37e387bfec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.060469] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.060724] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.060824] env[63371]: DEBUG nova.network.neutron [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1927.061020] env[63371]: DEBUG nova.objects.instance [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lazy-loading 'info_cache' on Instance uuid 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1927.288339] env[63371]: DEBUG nova.scheduler.client.report [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1927.337290] env[63371]: DEBUG nova.network.neutron [req-ac5b9a19-8952-40d8-ad35-9e2244a47f94 req-81c9664b-84ff-4dbf-8968-dc647474c166 service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Updated VIF entry in instance network info cache for port 34275fec-e3cb-4276-9619-f3498ff59a3a. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1927.337680] env[63371]: DEBUG nova.network.neutron [req-ac5b9a19-8952-40d8-ad35-9e2244a47f94 req-81c9664b-84ff-4dbf-8968-dc647474c166 service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Updating instance_info_cache with network_info: [{"id": "34275fec-e3cb-4276-9619-f3498ff59a3a", "address": "fa:16:3e:ad:41:b5", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34275fec-e3", "ovs_interfaceid": "34275fec-e3cb-4276-9619-f3498ff59a3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1927.542327] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520a19ec-2527-92ec-4066-8e37e387bfec, 'name': SearchDatastore_Task, 'duration_secs': 0.010083} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.542454] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1927.543025] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1927.543025] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.543025] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.543245] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1927.543440] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5973981b-33fe-40b7-b081-2cc1d9c7036a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.552093] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1927.552299] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1927.553048] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13e70f0f-db9b-462c-a4ba-7472d7014b1c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.558715] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1927.558715] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52715627-9b9b-935e-a455-29e307215c6a" [ 1927.558715] env[63371]: _type = "Task" [ 1927.558715] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.568212] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52715627-9b9b-935e-a455-29e307215c6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.840486] env[63371]: DEBUG oslo_concurrency.lockutils [req-ac5b9a19-8952-40d8-ad35-9e2244a47f94 req-81c9664b-84ff-4dbf-8968-dc647474c166 service nova] Releasing lock "refresh_cache-29791f6c-edec-44b3-828b-0e306d167c42" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.071115] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52715627-9b9b-935e-a455-29e307215c6a, 'name': SearchDatastore_Task, 'duration_secs': 0.01445} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.072911] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cae08e2f-745d-47fe-9827-19a9541d8975 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.077484] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1928.077484] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5277a0ec-ae99-a746-ec10-a5e03ad7e1a8" [ 1928.077484] env[63371]: _type = "Task" [ 1928.077484] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.087132] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5277a0ec-ae99-a746-ec10-a5e03ad7e1a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.302036] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.764s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.354232] env[63371]: DEBUG nova.network.neutron [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [{"id": "01b878e5-651e-49f1-959f-7da17291c0bc", "address": "fa:16:3e:b7:c4:0c", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01b878e5-65", "ovs_interfaceid": "01b878e5-651e-49f1-959f-7da17291c0bc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.587949] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5277a0ec-ae99-a746-ec10-a5e03ad7e1a8, 'name': SearchDatastore_Task, 'duration_secs': 0.009669} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.588228] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.588482] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 29791f6c-edec-44b3-828b-0e306d167c42/29791f6c-edec-44b3-828b-0e306d167c42.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1928.588731] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1ac5570-c002-4762-ac3b-1cbd42549b12 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.595639] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1928.595639] env[63371]: value = "task-1775066" [ 1928.595639] env[63371]: _type = "Task" [ 1928.595639] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.603661] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775066, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.856622] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.856956] env[63371]: DEBUG nova.objects.instance [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lazy-loading 'migration_context' on Instance uuid 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1929.106486] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775066, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.359759] env[63371]: DEBUG nova.objects.base [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Object Instance<88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec> lazy-loaded attributes: info_cache,migration_context {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1929.360748] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55423e66-08e3-4dd1-8325-bae2e79d3c83 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.384670] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd8ba6f0-8aaa-4175-84a8-4b807aaeb718 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.390206] env[63371]: DEBUG oslo_vmware.api [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1929.390206] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ed7c8c-6de9-7694-c55c-c5939606934e" [ 1929.390206] env[63371]: _type = "Task" [ 1929.390206] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.398067] env[63371]: DEBUG oslo_vmware.api [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ed7c8c-6de9-7694-c55c-c5939606934e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.606545] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775066, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527004} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.606841] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 29791f6c-edec-44b3-828b-0e306d167c42/29791f6c-edec-44b3-828b-0e306d167c42.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1929.607068] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1929.607319] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-41f6ab24-804a-4f27-a56d-ec59cedaa8d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.613318] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1929.613318] env[63371]: value = "task-1775067" [ 1929.613318] env[63371]: _type = "Task" [ 1929.613318] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.621497] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775067, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.844794] env[63371]: INFO nova.compute.manager [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Swapping old allocation on dict_keys(['c079ebb1-2fa2-4df9-bdab-118e305653c1']) held by migration 14f006ac-29e5-4cd6-9ce5-48006f177492 for instance [ 1929.876833] env[63371]: DEBUG nova.scheduler.client.report [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Overwriting current allocation {'allocations': {'c079ebb1-2fa2-4df9-bdab-118e305653c1': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 175}}, 'project_id': 'ceecd2a995cf4da0b4218e371065ca0b', 'user_id': 'd08b55f9fa3a45b0a8672e955ee360c1', 'consumer_generation': 1} on consumer 05f6f94a-c9c4-4737-8b07-77e9c2093497 {{(pid=63371) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 1929.904267] env[63371]: DEBUG oslo_vmware.api [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ed7c8c-6de9-7694-c55c-c5939606934e, 'name': SearchDatastore_Task, 'duration_secs': 0.006634} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.904568] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.904811] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.974554] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1929.974554] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1929.974715] env[63371]: DEBUG nova.network.neutron [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1930.126754] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775067, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086741} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.127109] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1930.127926] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f883ab6-ee4e-474f-99d4-3b3eb550d3d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.150649] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 29791f6c-edec-44b3-828b-0e306d167c42/29791f6c-edec-44b3-828b-0e306d167c42.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1930.150770] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96f69dc6-7b6e-425e-ad36-4ce6d223f3d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.170472] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1930.170472] env[63371]: value = "task-1775069" [ 1930.170472] env[63371]: _type = "Task" [ 1930.170472] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.178287] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775069, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.549967] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00de7b73-414b-4403-922f-2d9f17f9c84f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.560342] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec63df7b-b284-4553-9507-b8992e83f233 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.594411] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81b6dea-0a84-4847-b34f-386e67930c69 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.601766] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a279fe58-1fc0-4787-aac5-d235220f1063 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.615976] env[63371]: DEBUG nova.compute.provider_tree [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1930.684025] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775069, 'name': ReconfigVM_Task, 'duration_secs': 0.268217} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.684025] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 29791f6c-edec-44b3-828b-0e306d167c42/29791f6c-edec-44b3-828b-0e306d167c42.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1930.684687] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b6b6f2e6-7871-4628-b487-69ca16f96816 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.695058] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1930.695058] env[63371]: value = "task-1775070" [ 1930.695058] env[63371]: _type = "Task" [ 1930.695058] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.705399] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775070, 'name': Rename_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.824298] env[63371]: DEBUG nova.network.neutron [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance_info_cache with network_info: [{"id": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "address": "fa:16:3e:b6:f8:d2", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5a88ea10-92", "ovs_interfaceid": "5a88ea10-929b-41c9-b1b4-bf61377715c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.119032] env[63371]: DEBUG nova.scheduler.client.report [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1931.208673] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775070, 'name': Rename_Task, 'duration_secs': 0.142776} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.208972] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1931.211390] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8c54ebd9-8d7a-459e-9c93-344d779cdbf0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.216177] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1931.216177] env[63371]: value = "task-1775071" [ 1931.216177] env[63371]: _type = "Task" [ 1931.216177] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.229701] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775071, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.328510] env[63371]: DEBUG oslo_concurrency.lockutils [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-05f6f94a-c9c4-4737-8b07-77e9c2093497" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.329536] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89665cbf-b7cd-4edb-9c63-be95a39a9f3d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.336783] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf37ccf-eca6-4a8e-93ab-cd34028d6098 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.726961] env[63371]: DEBUG oslo_vmware.api [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775071, 'name': PowerOnVM_Task, 'duration_secs': 0.487592} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.727299] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1931.727650] env[63371]: INFO nova.compute.manager [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Took 7.61 seconds to spawn the instance on the hypervisor. [ 1931.727869] env[63371]: DEBUG nova.compute.manager [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1931.728701] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52001ae0-94f6-4f26-8710-852667c8e0cf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.794626] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.795041] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1932.130717] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.225s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1932.245259] env[63371]: INFO nova.compute.manager [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Took 15.26 seconds to build instance. [ 1932.297755] env[63371]: DEBUG nova.compute.manager [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1932.435782] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1932.436479] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27c807d6-4ca9-44b0-b30b-f8357e9147da {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.443890] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1932.443890] env[63371]: value = "task-1775073" [ 1932.443890] env[63371]: _type = "Task" [ 1932.443890] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.451236] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775073, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.538445] env[63371]: DEBUG nova.compute.manager [req-557f02b5-b19a-4278-93e4-2cef8a55f94f req-fc22a803-9bb3-4dd2-9976-323bf3cc1a5e service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Received event network-changed-34275fec-e3cb-4276-9619-f3498ff59a3a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1932.538659] env[63371]: DEBUG nova.compute.manager [req-557f02b5-b19a-4278-93e4-2cef8a55f94f req-fc22a803-9bb3-4dd2-9976-323bf3cc1a5e service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Refreshing instance network info cache due to event network-changed-34275fec-e3cb-4276-9619-f3498ff59a3a. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1932.538987] env[63371]: DEBUG oslo_concurrency.lockutils [req-557f02b5-b19a-4278-93e4-2cef8a55f94f req-fc22a803-9bb3-4dd2-9976-323bf3cc1a5e service nova] Acquiring lock "refresh_cache-29791f6c-edec-44b3-828b-0e306d167c42" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1932.538987] env[63371]: DEBUG oslo_concurrency.lockutils [req-557f02b5-b19a-4278-93e4-2cef8a55f94f req-fc22a803-9bb3-4dd2-9976-323bf3cc1a5e service nova] Acquired lock "refresh_cache-29791f6c-edec-44b3-828b-0e306d167c42" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.539141] env[63371]: DEBUG nova.network.neutron [req-557f02b5-b19a-4278-93e4-2cef8a55f94f req-fc22a803-9bb3-4dd2-9976-323bf3cc1a5e service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Refreshing network info cache for port 34275fec-e3cb-4276-9619-f3498ff59a3a {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1932.691617] env[63371]: INFO nova.scheduler.client.report [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleted allocation for migration 699a4f1e-a25a-43be-bdeb-a7b17b4169f3 [ 1932.747450] env[63371]: DEBUG oslo_concurrency.lockutils [None req-37dcae9b-6180-492e-a89d-f42364a1bc89 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "29791f6c-edec-44b3-828b-0e306d167c42" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.783s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1932.819548] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1932.819836] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1932.822293] env[63371]: INFO nova.compute.claims [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1932.954488] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775073, 'name': PowerOffVM_Task, 'duration_secs': 0.395534} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.955246] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1932.955569] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1932.955786] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1932.955941] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1932.956182] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1932.956279] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1932.956431] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1932.956631] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1932.956787] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 
tempest-ServerActionsTestOtherB-610614522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1932.956952] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1932.957166] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1932.957351] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1932.962572] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c21241e-b2ab-4fea-8a1a-1cc10f14eb8c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.978439] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1932.978439] env[63371]: value = "task-1775074" [ 1932.978439] env[63371]: _type = "Task" [ 1932.978439] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.988933] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775074, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.197203] env[63371]: DEBUG oslo_concurrency.lockutils [None req-90c5b59a-13dc-44fb-98ef-7f86a01d8e33 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.696s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.348462] env[63371]: DEBUG nova.network.neutron [req-557f02b5-b19a-4278-93e4-2cef8a55f94f req-fc22a803-9bb3-4dd2-9976-323bf3cc1a5e service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Updated VIF entry in instance network info cache for port 34275fec-e3cb-4276-9619-f3498ff59a3a. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1933.348842] env[63371]: DEBUG nova.network.neutron [req-557f02b5-b19a-4278-93e4-2cef8a55f94f req-fc22a803-9bb3-4dd2-9976-323bf3cc1a5e service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Updating instance_info_cache with network_info: [{"id": "34275fec-e3cb-4276-9619-f3498ff59a3a", "address": "fa:16:3e:ad:41:b5", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap34275fec-e3", "ovs_interfaceid": "34275fec-e3cb-4276-9619-f3498ff59a3a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.489159] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775074, 'name': ReconfigVM_Task, 'duration_secs': 0.270051} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.490227] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea71054d-b8bf-47c3-976d-d65655e99134 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.512315] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1933.512574] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1933.512733] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1933.512914] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1933.513076] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1933.513231] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1933.513435] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1933.513608] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1933.513750] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1933.513947] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1933.514151] env[63371]: DEBUG nova.virt.hardware [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1933.515057] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-515b27d6-2940-4383-b17b-f8159d4aae9c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.520942] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1933.520942] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5293f407-a7ad-078f-e491-670fe911699a" [ 1933.520942] env[63371]: _type = "Task" [ 1933.520942] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.529524] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5293f407-a7ad-078f-e491-670fe911699a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.853310] env[63371]: DEBUG oslo_concurrency.lockutils [req-557f02b5-b19a-4278-93e4-2cef8a55f94f req-fc22a803-9bb3-4dd2-9976-323bf3cc1a5e service nova] Releasing lock "refresh_cache-29791f6c-edec-44b3-828b-0e306d167c42" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.903752] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.904057] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.904279] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1933.904466] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1933.904637] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.909301] env[63371]: INFO nova.compute.manager [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Terminating instance [ 1933.911188] env[63371]: DEBUG nova.compute.manager [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1933.911392] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1933.912280] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf5ec69f-2df1-4624-bdb1-357caf9aa423 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.920393] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1933.922970] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f13b138-a667-45d5-872d-bf19a79919a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.930437] env[63371]: DEBUG oslo_vmware.api [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1933.930437] env[63371]: value = "task-1775076" [ 1933.930437] env[63371]: _type = "Task" [ 1933.930437] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.941214] env[63371]: DEBUG oslo_vmware.api [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775076, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.977316] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a118c5dc-3766-476c-9022-5b6b4cdce625 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.986843] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550b089e-bd1a-422a-9078-6e6e81969dff {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.042467] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd1fecd-93f6-45d2-8f07-b1b7b8770d92 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.059180] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc85217b-1998-4f83-82ee-5d1d4d5c38c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.065801] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5293f407-a7ad-078f-e491-670fe911699a, 'name': SearchDatastore_Task, 'duration_secs': 0.011185} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.075438] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfiguring VM instance instance-00000070 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1934.076481] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4724a8b-8428-4985-82e4-b4da481b8f5f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.113506] env[63371]: DEBUG nova.compute.provider_tree [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1934.126147] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1934.126147] env[63371]: value = "task-1775077" [ 1934.126147] env[63371]: _type = "Task" [ 1934.126147] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.136890] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775077, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.441838] env[63371]: DEBUG oslo_vmware.api [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775076, 'name': PowerOffVM_Task, 'duration_secs': 0.278106} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.442226] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1934.442485] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1934.442851] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0ed05dac-7ae4-47d1-aac1-3e7426d05d5e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.514058] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1934.514058] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1934.514058] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleting the datastore file [datastore1] 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1934.514442] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-855e48b4-2750-4907-9d34-b125f6eb5e5a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.520914] env[63371]: DEBUG oslo_vmware.api [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1934.520914] env[63371]: value = "task-1775079" [ 1934.520914] env[63371]: _type = "Task" [ 1934.520914] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.530919] env[63371]: DEBUG oslo_vmware.api [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775079, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.618753] env[63371]: DEBUG nova.scheduler.client.report [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1934.636566] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775077, 'name': ReconfigVM_Task, 'duration_secs': 0.235571} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.637570] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfigured VM instance instance-00000070 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1934.638438] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-825a7150-7de8-46d4-96e4-f3c83d36b157 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.666546] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 05f6f94a-c9c4-4737-8b07-77e9c2093497/05f6f94a-c9c4-4737-8b07-77e9c2093497.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1934.667218] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7827c2cb-642a-4761-a8a1-2c79adb33633 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.687630] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1934.687630] env[63371]: value = "task-1775080" [ 1934.687630] env[63371]: _type = "Task" [ 1934.687630] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.697824] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775080, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.031645] env[63371]: DEBUG oslo_vmware.api [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775079, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164115} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.031944] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1935.032157] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1935.032342] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1935.032530] env[63371]: INFO nova.compute.manager [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1935.032785] env[63371]: DEBUG oslo.service.loopingcall [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1935.032994] env[63371]: DEBUG nova.compute.manager [-] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1935.033713] env[63371]: DEBUG nova.network.neutron [-] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1935.123241] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.303s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.123853] env[63371]: DEBUG nova.compute.manager [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1935.197209] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775080, 'name': ReconfigVM_Task, 'duration_secs': 0.344479} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.197494] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 05f6f94a-c9c4-4737-8b07-77e9c2093497/05f6f94a-c9c4-4737-8b07-77e9c2093497.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1935.198613] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae851803-5b86-47b9-b12f-9a47640b283f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.219905] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5f0bff-6a56-48be-8ef4-2ca426757b13 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.241404] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61284ba4-bbc1-4c3f-b203-280dddd6c6ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.261764] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb93861-7a15-4d29-b7ef-9866466775ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.268591] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 
tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1935.268834] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a4355fc-f785-4301-938a-5f386f22d11f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.274803] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1935.274803] env[63371]: value = "task-1775081" [ 1935.274803] env[63371]: _type = "Task" [ 1935.274803] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.282075] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775081, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.460027] env[63371]: DEBUG nova.compute.manager [req-5ed2e35b-126a-4452-a11f-66529793baa5 req-93170e25-c651-4904-bb77-f1983572d705 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Received event network-vif-deleted-01b878e5-651e-49f1-959f-7da17291c0bc {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1935.460027] env[63371]: INFO nova.compute.manager [req-5ed2e35b-126a-4452-a11f-66529793baa5 req-93170e25-c651-4904-bb77-f1983572d705 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Neutron deleted interface 01b878e5-651e-49f1-959f-7da17291c0bc; detaching it from the instance and deleting it from the info cache [ 1935.460027] env[63371]: DEBUG nova.network.neutron [req-5ed2e35b-126a-4452-a11f-66529793baa5 req-93170e25-c651-4904-bb77-f1983572d705 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1935.631331] env[63371]: DEBUG nova.compute.utils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1935.632934] env[63371]: DEBUG nova.compute.manager [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1935.633549] env[63371]: DEBUG nova.network.neutron [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1935.696522] env[63371]: DEBUG nova.policy [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0af72e7f1e644797b480011450d02e02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da713632f95146f1986c0d8a9e529ca0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1935.795521] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775081, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.935944] env[63371]: DEBUG nova.network.neutron [-] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1935.963159] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01607872-093a-4a03-ab73-c56dabd3c297 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.974586] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fca62fab-c20b-4848-a813-d477e041a016 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.023388] env[63371]: DEBUG nova.compute.manager [req-5ed2e35b-126a-4452-a11f-66529793baa5 req-93170e25-c651-4904-bb77-f1983572d705 service nova] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Detach interface failed, port_id=01b878e5-651e-49f1-959f-7da17291c0bc, reason: Instance 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1936.136855] env[63371]: DEBUG nova.compute.manager [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1936.140375] env[63371]: DEBUG nova.network.neutron [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Successfully created port: 14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1936.292983] env[63371]: DEBUG oslo_vmware.api [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775081, 'name': PowerOnVM_Task, 'duration_secs': 0.786055} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.292983] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1936.439073] env[63371]: INFO nova.compute.manager [-] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Took 1.41 seconds to deallocate network for instance. [ 1936.948030] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1936.948350] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1936.948350] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.970639] env[63371]: INFO nova.scheduler.client.report [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleted allocations for instance 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec [ 1937.092266] env[63371]: DEBUG oslo_concurrency.lockutils [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.092503] env[63371]: DEBUG oslo_concurrency.lockutils [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 
tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.150531] env[63371]: DEBUG nova.compute.manager [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1937.183739] env[63371]: DEBUG nova.virt.hardware [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1937.184010] env[63371]: DEBUG nova.virt.hardware [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1937.184182] env[63371]: DEBUG nova.virt.hardware [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1937.184363] env[63371]: DEBUG nova.virt.hardware [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1937.184510] env[63371]: DEBUG nova.virt.hardware [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1937.184658] env[63371]: DEBUG nova.virt.hardware [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1937.184863] env[63371]: DEBUG nova.virt.hardware [None req-d103ccdf-555c-4601-bc76-94ed25a0886f 
tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1937.185021] env[63371]: DEBUG nova.virt.hardware [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1937.185198] env[63371]: DEBUG nova.virt.hardware [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1937.185358] env[63371]: DEBUG nova.virt.hardware [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1937.185523] env[63371]: DEBUG nova.virt.hardware [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1937.186480] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f531c3a-22c6-419d-bb93-a01491f82c69 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.193897] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ab6b86-2d99-4c7f-b4d0-17d9fb7e6078 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.339661] env[63371]: INFO nova.compute.manager [None req-02e05bad-11f2-4a27-b025-a3c81b00ff04 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance to original state: 'active' [ 1937.479319] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c89ad681-dc97-495c-88b0-db6ece6479a3 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.575s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.597504] env[63371]: INFO nova.compute.manager [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Detaching volume 5f60ff6a-450b-4518-8917-11df550c2ac1 [ 1937.602334] env[63371]: DEBUG nova.compute.manager [req-06387cb9-7814-497b-8dc7-bf1a4c1aba30 req-72cc8965-3a86-4046-88c4-3ee553669633 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Received event 
network-vif-plugged-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1937.602567] env[63371]: DEBUG oslo_concurrency.lockutils [req-06387cb9-7814-497b-8dc7-bf1a4c1aba30 req-72cc8965-3a86-4046-88c4-3ee553669633 service nova] Acquiring lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.602778] env[63371]: DEBUG oslo_concurrency.lockutils [req-06387cb9-7814-497b-8dc7-bf1a4c1aba30 req-72cc8965-3a86-4046-88c4-3ee553669633 service nova] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.602968] env[63371]: DEBUG oslo_concurrency.lockutils [req-06387cb9-7814-497b-8dc7-bf1a4c1aba30 req-72cc8965-3a86-4046-88c4-3ee553669633 service nova] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1937.603212] env[63371]: DEBUG nova.compute.manager [req-06387cb9-7814-497b-8dc7-bf1a4c1aba30 req-72cc8965-3a86-4046-88c4-3ee553669633 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] No waiting events found dispatching network-vif-plugged-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1937.603388] env[63371]: WARNING nova.compute.manager [req-06387cb9-7814-497b-8dc7-bf1a4c1aba30 req-72cc8965-3a86-4046-88c4-3ee553669633 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Received unexpected event network-vif-plugged-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e for instance with vm_state building and task_state spawning. [ 1937.638182] env[63371]: INFO nova.virt.block_device [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Attempting to driver detach volume 5f60ff6a-450b-4518-8917-11df550c2ac1 from mountpoint /dev/sdb [ 1937.638182] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Volume detach. 
Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1937.638182] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368505', 'volume_id': '5f60ff6a-450b-4518-8917-11df550c2ac1', 'name': 'volume-5f60ff6a-450b-4518-8917-11df550c2ac1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ff724a9f-5e9a-4683-8eb3-058fb3639ea5', 'attached_at': '', 'detached_at': '', 'volume_id': '5f60ff6a-450b-4518-8917-11df550c2ac1', 'serial': '5f60ff6a-450b-4518-8917-11df550c2ac1'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1937.638182] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdc75c2-bf27-4001-8957-5615b4431a07 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.661062] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee9d843-fd13-4bc1-879c-cd4384239397 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.668944] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40712e1b-d84b-4a19-8f28-03544b2dcb02 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.691606] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f54798-14a1-42c9-9d78-feb9eedcb87d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.711489] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] The volume has not been displaced from its original location: [datastore1] volume-5f60ff6a-450b-4518-8917-11df550c2ac1/volume-5f60ff6a-450b-4518-8917-11df550c2ac1.vmdk. No consolidation needed. 
{{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1937.717330] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Reconfiguring VM instance instance-0000006f to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1937.718641] env[63371]: DEBUG nova.network.neutron [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Successfully updated port: 14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1937.720167] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15608d66-c9ed-4ece-9c56-2dec7e5d8fc1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.735529] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1937.735704] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1937.735803] env[63371]: DEBUG nova.network.neutron [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1937.742854] env[63371]: DEBUG oslo_vmware.api [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1937.742854] env[63371]: value = "task-1775083" [ 1937.742854] env[63371]: _type = "Task" [ 1937.742854] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.755546] env[63371]: DEBUG oslo_vmware.api [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775083, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.253338] env[63371]: DEBUG oslo_vmware.api [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775083, 'name': ReconfigVM_Task, 'duration_secs': 0.265494} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.253668] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Reconfigured VM instance instance-0000006f to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1938.258931] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d53cb4c-f545-42e5-8063-4c7efce5d7fe {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.270632] env[63371]: DEBUG nova.network.neutron [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1938.278238] env[63371]: DEBUG oslo_vmware.api [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1938.278238] env[63371]: value = "task-1775084" [ 1938.278238] env[63371]: _type = "Task" [ 1938.278238] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.286852] env[63371]: DEBUG oslo_vmware.api [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775084, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.465228] env[63371]: DEBUG nova.network.neutron [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updating instance_info_cache with network_info: [{"id": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "address": "fa:16:3e:2e:85:be", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d87d33-0a", "ovs_interfaceid": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1938.789045] env[63371]: DEBUG oslo_vmware.api [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775084, 'name': ReconfigVM_Task, 'duration_secs': 0.148143} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.789045] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368505', 'volume_id': '5f60ff6a-450b-4518-8917-11df550c2ac1', 'name': 'volume-5f60ff6a-450b-4518-8917-11df550c2ac1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'ff724a9f-5e9a-4683-8eb3-058fb3639ea5', 'attached_at': '', 'detached_at': '', 'volume_id': '5f60ff6a-450b-4518-8917-11df550c2ac1', 'serial': '5f60ff6a-450b-4518-8917-11df550c2ac1'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1938.913568] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1938.913802] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1938.967692] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.968017] env[63371]: DEBUG nova.compute.manager [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Instance network_info: |[{"id": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "address": "fa:16:3e:2e:85:be", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d87d33-0a", "ovs_interfaceid": 
"14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1938.968527] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:85:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14d87d33-0ac4-480f-b86e-c9e13b3e3e4e', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1938.977567] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating folder: Project (da713632f95146f1986c0d8a9e529ca0). Parent ref: group-v368199. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1938.977848] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4b60726d-1c35-4290-a51f-f2184552d0e9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.989725] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Created folder: Project (da713632f95146f1986c0d8a9e529ca0) in parent group-v368199. [ 1938.989906] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating folder: Instances. Parent ref: group-v368513. {{(pid=63371) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1938.990153] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2131582-1cf9-48bc-813e-16fd7ef960ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.000303] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Created folder: Instances in parent group-v368513. [ 1939.000540] env[63371]: DEBUG oslo.service.loopingcall [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1939.000722] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1939.000923] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d71fd70-c9dd-41fc-9e38-4389973d9fb0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.020621] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1939.020621] env[63371]: value = "task-1775087" [ 1939.020621] env[63371]: _type = "Task" [ 1939.020621] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.031503] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775087, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.247059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.247059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.247059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "05f6f94a-c9c4-4737-8b07-77e9c2093497-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.247059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.247059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.249170] env[63371]: INFO nova.compute.manager [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a 
tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Terminating instance [ 1939.253498] env[63371]: DEBUG nova.compute.manager [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1939.253780] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1939.253996] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-738caf3b-98ff-491a-8e2d-69abb9064d47 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.261118] env[63371]: DEBUG oslo_vmware.api [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1939.261118] env[63371]: value = "task-1775089" [ 1939.261118] env[63371]: _type = "Task" [ 1939.261118] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.269754] env[63371]: DEBUG oslo_vmware.api [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775089, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.335710] env[63371]: DEBUG nova.objects.instance [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lazy-loading 'flavor' on Instance uuid ff724a9f-5e9a-4683-8eb3-058fb3639ea5 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1939.416753] env[63371]: DEBUG nova.compute.manager [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1939.530553] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775087, 'name': CreateVM_Task, 'duration_secs': 0.502549} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.530733] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1939.531439] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.531612] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1939.531946] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1939.532219] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fbc11c1-e091-4374-9ef6-c2c17e25958c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.536513] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 1939.536513] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ec73dd-299c-27bb-fe70-47ff1b294b7d" [ 1939.536513] env[63371]: _type = "Task" [ 1939.536513] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.543822] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ec73dd-299c-27bb-fe70-47ff1b294b7d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.627620] env[63371]: DEBUG nova.compute.manager [req-d16377ca-42cd-492c-806d-6e85c37dde19 req-790c0bdf-219b-49dd-8b0e-830799287a06 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Received event network-changed-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1939.627800] env[63371]: DEBUG nova.compute.manager [req-d16377ca-42cd-492c-806d-6e85c37dde19 req-790c0bdf-219b-49dd-8b0e-830799287a06 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Refreshing instance network info cache due to event network-changed-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1939.628033] env[63371]: DEBUG oslo_concurrency.lockutils [req-d16377ca-42cd-492c-806d-6e85c37dde19 req-790c0bdf-219b-49dd-8b0e-830799287a06 service nova] Acquiring lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1939.628187] env[63371]: DEBUG oslo_concurrency.lockutils [req-d16377ca-42cd-492c-806d-6e85c37dde19 req-790c0bdf-219b-49dd-8b0e-830799287a06 service nova] Acquired lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1939.628350] env[63371]: DEBUG nova.network.neutron [req-d16377ca-42cd-492c-806d-6e85c37dde19 req-790c0bdf-219b-49dd-8b0e-830799287a06 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Refreshing network info cache for port 14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1939.770774] env[63371]: DEBUG oslo_vmware.api [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775089, 'name': PowerOffVM_Task, 'duration_secs': 0.223037} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.771008] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1939.771227] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Volume detach. 
Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1939.771424] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368508', 'volume_id': '0ae4dd79-9572-4361-935b-a03dac924bed', 'name': 'volume-0ae4dd79-9572-4361-935b-a03dac924bed', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '05f6f94a-c9c4-4737-8b07-77e9c2093497', 'attached_at': '2024-12-11T21:41:28.000000', 'detached_at': '', 'volume_id': '0ae4dd79-9572-4361-935b-a03dac924bed', 'serial': '0ae4dd79-9572-4361-935b-a03dac924bed'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1939.772193] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f42447-e92b-470f-bbb7-b486a5a8d70c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.792503] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be65c7f4-9135-4c90-abda-5d05b604ed21 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.798960] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8d0eba-d2b0-4350-b12d-6466b0eae68b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.818699] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b845cf-17a7-45fa-8323-b538e7ed7a7d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.834799] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] The volume has not been displaced from its original location: [datastore1] volume-0ae4dd79-9572-4361-935b-a03dac924bed/volume-0ae4dd79-9572-4361-935b-a03dac924bed.vmdk. No consolidation needed. 
{{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1939.840112] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfiguring VM instance instance-00000070 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1939.842156] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87cae1e6-deaa-41a0-b519-7ac837c6ecfb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.862836] env[63371]: DEBUG oslo_vmware.api [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1939.862836] env[63371]: value = "task-1775090" [ 1939.862836] env[63371]: _type = "Task" [ 1939.862836] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.871408] env[63371]: DEBUG oslo_vmware.api [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775090, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.943034] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.943152] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.945208] env[63371]: INFO nova.compute.claims [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1940.047582] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ec73dd-299c-27bb-fe70-47ff1b294b7d, 'name': SearchDatastore_Task, 'duration_secs': 0.074395} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.047899] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1940.048149] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1940.048382] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1940.048526] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1940.048700] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1940.048960] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cbe3da2c-100c-40b3-965b-bfffcfc6257b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.056771] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1940.056965] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1940.057641] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f8840b4-fa1c-42d5-a9c9-a3e1d56274fa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.062745] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 1940.062745] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52280c4c-dde0-2f24-14eb-eb8a41ff48c2" [ 1940.062745] env[63371]: _type = "Task" [ 1940.062745] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.070052] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52280c4c-dde0-2f24-14eb-eb8a41ff48c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.331909] env[63371]: DEBUG nova.network.neutron [req-d16377ca-42cd-492c-806d-6e85c37dde19 req-790c0bdf-219b-49dd-8b0e-830799287a06 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updated VIF entry in instance network info cache for port 14d87d33-0ac4-480f-b86e-c9e13b3e3e4e. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1940.332296] env[63371]: DEBUG nova.network.neutron [req-d16377ca-42cd-492c-806d-6e85c37dde19 req-790c0bdf-219b-49dd-8b0e-830799287a06 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updating instance_info_cache with network_info: [{"id": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "address": "fa:16:3e:2e:85:be", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d87d33-0a", "ovs_interfaceid": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1940.357988] env[63371]: DEBUG oslo_concurrency.lockutils [None req-144d9f2c-3b55-45f7-a9de-973c4bdd55dd tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" "released" by 
"nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.265s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.375026] env[63371]: DEBUG oslo_vmware.api [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775090, 'name': ReconfigVM_Task, 'duration_secs': 0.458947} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.375298] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Reconfigured VM instance instance-00000070 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1940.380107] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c03c2d38-e07d-47e8-b601-78ba949ffabb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.396099] env[63371]: DEBUG oslo_vmware.api [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1940.396099] env[63371]: value = "task-1775091" [ 1940.396099] env[63371]: _type = "Task" [ 1940.396099] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.404727] env[63371]: DEBUG oslo_vmware.api [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775091, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.574483] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52280c4c-dde0-2f24-14eb-eb8a41ff48c2, 'name': SearchDatastore_Task, 'duration_secs': 0.00805} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.575548] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c5eb0fe-f5bd-45f9-9427-19c2031d647e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.580881] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 1940.580881] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae68fc-1e2f-90ff-2b9d-2723e3340e9d" [ 1940.580881] env[63371]: _type = "Task" [ 1940.580881] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.588441] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae68fc-1e2f-90ff-2b9d-2723e3340e9d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.834716] env[63371]: DEBUG oslo_concurrency.lockutils [req-d16377ca-42cd-492c-806d-6e85c37dde19 req-790c0bdf-219b-49dd-8b0e-830799287a06 service nova] Releasing lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1940.906978] env[63371]: DEBUG oslo_vmware.api [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775091, 'name': ReconfigVM_Task, 'duration_secs': 0.322942} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.906978] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368508', 'volume_id': '0ae4dd79-9572-4361-935b-a03dac924bed', 'name': 'volume-0ae4dd79-9572-4361-935b-a03dac924bed', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '05f6f94a-c9c4-4737-8b07-77e9c2093497', 'attached_at': '2024-12-11T21:41:28.000000', 'detached_at': '', 'volume_id': '0ae4dd79-9572-4361-935b-a03dac924bed', 'serial': '0ae4dd79-9572-4361-935b-a03dac924bed'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1940.906978] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1940.907503] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0177692a-d264-49c4-8e5c-e007acc7671a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.914085] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1940.914349] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-10055b4a-beb2-4a4c-ba6a-52196d4823d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.989938] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 
tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1940.993187] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1940.993187] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleting the datastore file [datastore1] 05f6f94a-c9c4-4737-8b07-77e9c2093497 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1940.993187] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca3c5ffd-0099-4363-bcbe-65098346ca3e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.997486] env[63371]: DEBUG oslo_vmware.api [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1940.997486] env[63371]: value = "task-1775093" [ 1940.997486] env[63371]: _type = "Task" [ 1940.997486] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.008953] env[63371]: DEBUG oslo_vmware.api [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775093, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.093300] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ae68fc-1e2f-90ff-2b9d-2723e3340e9d, 'name': SearchDatastore_Task, 'duration_secs': 0.008659} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.094936] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1941.095307] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d/9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1941.096354] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e011bc-d7a5-4749-998c-4035872fbea8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.099413] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e49d416d-ac55-4d75-b52b-124d331a5627 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.105610] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329a1bcc-bd0f-4300-ab6c-cc39d0d17910 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.109550] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 1941.109550] env[63371]: value = "task-1775094" [ 1941.109550] env[63371]: _type = "Task" [ 1941.109550] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.138081] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836d123e-767c-4e7c-b4b4-95ed1130d159 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.143766] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775094, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.149062] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a81c39f-aafa-424d-9183-2537fe02f69d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.162427] env[63371]: DEBUG nova.compute.provider_tree [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1941.430922] env[63371]: DEBUG oslo_concurrency.lockutils [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.431341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.431341] env[63371]: DEBUG oslo_concurrency.lockutils [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.431513] env[63371]: DEBUG oslo_concurrency.lockutils [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.431679] env[63371]: DEBUG oslo_concurrency.lockutils [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1941.434533] env[63371]: INFO nova.compute.manager [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Terminating instance [ 1941.436505] env[63371]: DEBUG nova.compute.manager [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 
tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1941.436717] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1941.437612] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b76ef3-bc3b-4e52-a595-bec8bec3fbd0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.445283] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1941.445885] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-68e0827b-ab52-4deb-be61-4b6343ab68f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.453173] env[63371]: DEBUG oslo_vmware.api [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1941.453173] env[63371]: value = "task-1775096" [ 1941.453173] env[63371]: _type = "Task" [ 1941.453173] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.464214] env[63371]: DEBUG oslo_vmware.api [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775096, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.507847] env[63371]: DEBUG oslo_vmware.api [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775093, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146096} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.508161] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1941.508369] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1941.508552] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1941.508730] env[63371]: INFO nova.compute.manager [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Took 2.26 seconds to destroy the instance on the hypervisor. [ 1941.508981] env[63371]: DEBUG oslo.service.loopingcall [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1941.509201] env[63371]: DEBUG nova.compute.manager [-] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1941.509301] env[63371]: DEBUG nova.network.neutron [-] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1941.619040] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775094, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.666478] env[63371]: DEBUG nova.scheduler.client.report [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1941.962987] env[63371]: DEBUG oslo_vmware.api [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775096, 'name': PowerOffVM_Task, 'duration_secs': 0.235399} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.963285] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1941.963482] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1941.963699] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e5993a0-3846-4d59-b94a-65afe82716b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.981220] env[63371]: DEBUG nova.compute.manager [req-7c8aef32-6245-4c3a-8cee-2e57343edc07 req-4445d645-c019-4a70-aa72-a1193915b221 service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Received event network-vif-deleted-5a88ea10-929b-41c9-b1b4-bf61377715c6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1941.981424] env[63371]: INFO nova.compute.manager [req-7c8aef32-6245-4c3a-8cee-2e57343edc07 req-4445d645-c019-4a70-aa72-a1193915b221 service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Neutron deleted interface 5a88ea10-929b-41c9-b1b4-bf61377715c6; detaching it from the instance and deleting it from the info cache [ 1941.981605] env[63371]: DEBUG nova.network.neutron [req-7c8aef32-6245-4c3a-8cee-2e57343edc07 req-4445d645-c019-4a70-aa72-a1193915b221 service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.120158] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775094, 'name': 
CopyVirtualDisk_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.172039] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.229s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.175018] env[63371]: DEBUG nova.compute.manager [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1942.455569] env[63371]: DEBUG nova.network.neutron [-] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.484360] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5899ba8-8e13-4d5e-ac69-8256d34cf20d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.493742] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9920fa64-a133-4c6b-9413-2c3d61ed9d1a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.521079] env[63371]: DEBUG nova.compute.manager [req-7c8aef32-6245-4c3a-8cee-2e57343edc07 req-4445d645-c019-4a70-aa72-a1193915b221 service nova] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Detach interface failed, port_id=5a88ea10-929b-41c9-b1b4-bf61377715c6, reason: Instance 05f6f94a-c9c4-4737-8b07-77e9c2093497 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1942.623147] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775094, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.681792] env[63371]: DEBUG nova.compute.utils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1942.683245] env[63371]: DEBUG nova.compute.manager [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1942.683414] env[63371]: DEBUG nova.network.neutron [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1942.721029] env[63371]: DEBUG nova.policy [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f85b2454eed34665b92a1ebc087353c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f98ab0107f5040139ef8be7c3ae22207', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1942.780552] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1942.780906] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1942.781203] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Deleting the datastore file [datastore1] ff724a9f-5e9a-4683-8eb3-058fb3639ea5 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1942.781658] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0fb4e7b-287d-4e0c-87d3-fbca1ade6df6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.789202] env[63371]: DEBUG oslo_vmware.api [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1942.789202] env[63371]: value = "task-1775098" [ 1942.789202] env[63371]: _type = "Task" [ 1942.789202] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.797872] env[63371]: DEBUG oslo_vmware.api [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775098, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.958022] env[63371]: INFO nova.compute.manager [-] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Took 1.45 seconds to deallocate network for instance. [ 1942.992428] env[63371]: DEBUG nova.network.neutron [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Successfully created port: 6fc15567-65bf-42ad-9a0a-1b1cee20b40b {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1943.121812] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775094, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.749323} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.122115] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d/9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1943.122335] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1943.122576] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-693865c3-f8ac-4021-95bc-9d74642ad0dc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.129755] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 1943.129755] env[63371]: value = "task-1775099" [ 1943.129755] env[63371]: _type = "Task" [ 1943.129755] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.137419] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775099, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.186426] env[63371]: DEBUG nova.compute.manager [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Start building block device mappings for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1943.298794] env[63371]: DEBUG oslo_vmware.api [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775098, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188784} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.299076] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1943.299279] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1943.299452] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1943.299625] env[63371]: INFO nova.compute.manager [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Took 1.86 seconds to destroy the instance on the hypervisor. [ 1943.299897] env[63371]: DEBUG oslo.service.loopingcall [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1943.300142] env[63371]: DEBUG nova.compute.manager [-] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1943.300242] env[63371]: DEBUG nova.network.neutron [-] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1943.502861] env[63371]: INFO nova.compute.manager [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Took 0.54 seconds to detach 1 volumes for instance. [ 1943.643490] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775099, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093623} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1943.643966] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1943.645076] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329707eb-5eac-4b4f-9550-8931e6ad929d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.674912] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d/9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1943.675335] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-43290dc2-446a-4a24-8bf4-71ccc14ea329 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.701780] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 1943.701780] env[63371]: value = "task-1775101" [ 1943.701780] env[63371]: _type = "Task" [ 1943.701780] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1943.710719] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775101, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.009977] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1944.010297] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1944.010494] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.014999] env[63371]: DEBUG nova.compute.manager [req-324b6958-923c-4a5a-bba5-54653bb6ceb6 req-1ffc684d-ecb3-41c5-8123-6ecb22835bfc service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Received event network-vif-deleted-baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1944.015245] env[63371]: INFO nova.compute.manager [req-324b6958-923c-4a5a-bba5-54653bb6ceb6 req-1ffc684d-ecb3-41c5-8123-6ecb22835bfc service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Neutron deleted interface baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6; detaching it from the instance and deleting it from the info cache [ 1944.015457] env[63371]: DEBUG nova.network.neutron [req-324b6958-923c-4a5a-bba5-54653bb6ceb6 req-1ffc684d-ecb3-41c5-8123-6ecb22835bfc service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1944.037241] env[63371]: INFO nova.scheduler.client.report [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleted allocations for instance 05f6f94a-c9c4-4737-8b07-77e9c2093497 [ 1944.198655] env[63371]: DEBUG nova.compute.manager [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1944.211352] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775101, 'name': ReconfigVM_Task, 'duration_secs': 0.259057} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.211628] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d/9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1944.212266] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8b16a967-780f-4fd8-b479-0920a1001968 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.219621] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 1944.219621] env[63371]: value = "task-1775102" [ 1944.219621] env[63371]: _type = "Task" [ 1944.219621] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.224206] env[63371]: DEBUG nova.virt.hardware [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1944.224435] env[63371]: DEBUG nova.virt.hardware [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1944.224590] env[63371]: DEBUG nova.virt.hardware [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1944.224770] env[63371]: DEBUG nova.virt.hardware [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1944.224936] env[63371]: DEBUG nova.virt.hardware [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] 
Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1944.225111] env[63371]: DEBUG nova.virt.hardware [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1944.225319] env[63371]: DEBUG nova.virt.hardware [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1944.225474] env[63371]: DEBUG nova.virt.hardware [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1944.225634] env[63371]: DEBUG nova.virt.hardware [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1944.225790] env[63371]: DEBUG nova.virt.hardware [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1944.225959] env[63371]: DEBUG nova.virt.hardware [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1944.226953] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a703c046-50e5-480c-902a-37c0559bfc6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.234388] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775102, 'name': Rename_Task} progress is 10%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.237341] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97cbc514-71c1-43a1-9f2d-1a42a6470255 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.318799] env[63371]: DEBUG nova.network.neutron [-] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1944.518031] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-456a676d-a718-4697-ad8b-b867f8ebb884 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.528137] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5409c68-a742-4e77-9e5b-63f4f82b9bf3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.544574] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5cfb9973-e9ac-4a6e-9c26-44c57d7f3a1a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "05f6f94a-c9c4-4737-8b07-77e9c2093497" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.298s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.563200] env[63371]: DEBUG nova.compute.manager [req-324b6958-923c-4a5a-bba5-54653bb6ceb6 req-1ffc684d-ecb3-41c5-8123-6ecb22835bfc service nova] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Detach interface failed, port_id=baee0e3e-86fe-4d9c-8d85-20fc1c54c5a6, reason: Instance ff724a9f-5e9a-4683-8eb3-058fb3639ea5 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1944.730170] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775102, 'name': Rename_Task, 'duration_secs': 0.138945} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.730472] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1944.731032] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6184446d-1b97-4581-807e-bd4232344da1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.737015] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 1944.737015] env[63371]: value = "task-1775103" [ 1944.737015] env[63371]: _type = "Task" [ 1944.737015] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.744298] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775103, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.821446] env[63371]: INFO nova.compute.manager [-] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Took 1.52 seconds to deallocate network for instance. [ 1944.833818] env[63371]: DEBUG nova.network.neutron [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Successfully updated port: 6fc15567-65bf-42ad-9a0a-1b1cee20b40b {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1945.248689] env[63371]: DEBUG oslo_vmware.api [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775103, 'name': PowerOnVM_Task, 'duration_secs': 0.478813} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.248938] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1945.249314] env[63371]: INFO nova.compute.manager [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Took 8.10 seconds to spawn the instance on the hypervisor. 
[ 1945.249399] env[63371]: DEBUG nova.compute.manager [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1945.250099] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04660045-bb40-40e1-bf7b-fe319c269512 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.329414] env[63371]: DEBUG oslo_concurrency.lockutils [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.329669] env[63371]: DEBUG oslo_concurrency.lockutils [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.329882] env[63371]: DEBUG nova.objects.instance [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lazy-loading 'resources' on Instance uuid ff724a9f-5e9a-4683-8eb3-058fb3639ea5 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1945.337903] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.338063] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.338185] env[63371]: DEBUG nova.network.neutron [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1945.474569] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1945.474800] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a 
tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.767643] env[63371]: INFO nova.compute.manager [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Took 12.96 seconds to build instance. [ 1945.889253] env[63371]: DEBUG nova.network.neutron [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Instance cache missing network info. {{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1945.955673] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c374119-a03c-400b-b30e-386455937260 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.963467] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c912296-b823-4e7c-a2a8-3c04bfcc1cc7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.992852] env[63371]: DEBUG nova.compute.manager [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1945.996502] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043fc90b-e796-4cf1-8ab5-6d248ef0f23c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.003788] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c92bcf8b-6f30-4813-8bee-a3e58ea755c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.018279] env[63371]: DEBUG nova.compute.provider_tree [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1946.047493] env[63371]: DEBUG nova.compute.manager [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Received event network-vif-plugged-6fc15567-65bf-42ad-9a0a-1b1cee20b40b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1946.047710] env[63371]: DEBUG oslo_concurrency.lockutils [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] Acquiring lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.047912] env[63371]: DEBUG oslo_concurrency.lockutils [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] Lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.048090] env[63371]: DEBUG oslo_concurrency.lockutils [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] Lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.048256] env[63371]: DEBUG nova.compute.manager [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] No waiting events found dispatching network-vif-plugged-6fc15567-65bf-42ad-9a0a-1b1cee20b40b {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1946.048417] env[63371]: WARNING nova.compute.manager [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Received unexpected event network-vif-plugged-6fc15567-65bf-42ad-9a0a-1b1cee20b40b for instance with vm_state building and task_state spawning. 
[ 1946.048572] env[63371]: DEBUG nova.compute.manager [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Received event network-changed-6fc15567-65bf-42ad-9a0a-1b1cee20b40b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1946.048720] env[63371]: DEBUG nova.compute.manager [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Refreshing instance network info cache due to event network-changed-6fc15567-65bf-42ad-9a0a-1b1cee20b40b. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1946.048882] env[63371]: DEBUG oslo_concurrency.lockutils [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] Acquiring lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1946.194488] env[63371]: DEBUG nova.network.neutron [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance_info_cache with network_info: [{"id": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "address": "fa:16:3e:54:78:e9", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc15567-65", "ovs_interfaceid": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.269895] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d103ccdf-555c-4601-bc76-94ed25a0886f tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.475s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.510741] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1946.522797] env[63371]: DEBUG nova.scheduler.client.report [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1946.696927] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.697297] env[63371]: DEBUG nova.compute.manager [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Instance network_info: |[{"id": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "address": "fa:16:3e:54:78:e9", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc15567-65", "ovs_interfaceid": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1946.697610] env[63371]: DEBUG oslo_concurrency.lockutils [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] Acquired lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1946.697824] env[63371]: DEBUG nova.network.neutron [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Refreshing network info cache for port 6fc15567-65bf-42ad-9a0a-1b1cee20b40b {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1946.698975] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 
tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:78:e9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6fc15567-65bf-42ad-9a0a-1b1cee20b40b', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1946.706461] env[63371]: DEBUG oslo.service.loopingcall [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1946.707367] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1946.707604] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7ea10624-b5c7-4f8a-845a-26067044f5a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.727054] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1946.727054] env[63371]: value = "task-1775105" [ 1946.727054] env[63371]: _type = "Task" [ 1946.727054] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.734841] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775105, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.027134] env[63371]: DEBUG oslo_concurrency.lockutils [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.697s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1947.031320] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.519s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1947.031921] env[63371]: INFO nova.compute.claims [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1947.048405] env[63371]: INFO nova.scheduler.client.report [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Deleted allocations for instance ff724a9f-5e9a-4683-8eb3-058fb3639ea5 [ 1947.237977] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775105, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.419882] env[63371]: DEBUG nova.network.neutron [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updated VIF entry in instance network info cache for port 6fc15567-65bf-42ad-9a0a-1b1cee20b40b. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1947.420279] env[63371]: DEBUG nova.network.neutron [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance_info_cache with network_info: [{"id": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "address": "fa:16:3e:54:78:e9", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc15567-65", "ovs_interfaceid": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.556268] env[63371]: DEBUG oslo_concurrency.lockutils [None req-73d8e16a-e5f9-4e2c-b2cc-5ac01a650362 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "ff724a9f-5e9a-4683-8eb3-058fb3639ea5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.125s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1947.738179] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775105, 'name': CreateVM_Task, 'duration_secs': 0.630019} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.738393] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1947.739126] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1947.739320] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1947.739646] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1947.739915] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b4e9c62-33f2-4b7d-993a-cb1c53584f55 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.744582] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1947.744582] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524727a2-e7e0-46f2-8c3f-fddc0efc552c" [ 1947.744582] env[63371]: _type = "Task" [ 1947.744582] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.754137] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524727a2-e7e0-46f2-8c3f-fddc0efc552c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.923261] env[63371]: DEBUG oslo_concurrency.lockutils [req-d478817e-b77a-4c6a-ac93-ccb9cf3e04b0 req-f7d878bd-9385-438a-8a3d-46c2bcb9dc8f service nova] Releasing lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1948.082893] env[63371]: DEBUG nova.compute.manager [req-f208d041-821b-4f8e-ad15-cd509fcd451c req-520b6474-1dcf-4679-a1b8-84e8c22312c9 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Received event network-changed-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1948.083164] env[63371]: DEBUG nova.compute.manager [req-f208d041-821b-4f8e-ad15-cd509fcd451c req-520b6474-1dcf-4679-a1b8-84e8c22312c9 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Refreshing instance network info cache due to event network-changed-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1948.083309] env[63371]: DEBUG oslo_concurrency.lockutils [req-f208d041-821b-4f8e-ad15-cd509fcd451c req-520b6474-1dcf-4679-a1b8-84e8c22312c9 service nova] Acquiring lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1948.083309] env[63371]: DEBUG oslo_concurrency.lockutils [req-f208d041-821b-4f8e-ad15-cd509fcd451c req-520b6474-1dcf-4679-a1b8-84e8c22312c9 service nova] Acquired lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1948.083487] env[63371]: DEBUG nova.network.neutron [req-f208d041-821b-4f8e-ad15-cd509fcd451c req-520b6474-1dcf-4679-a1b8-84e8c22312c9 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Refreshing network info cache for port 14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1948.146212] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec934b78-e285-4c4d-b082-eeef9e44fcb9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.153580] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15268a8-ca18-4717-8ea3-e8011fea3baa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.182989] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef345bcc-4f98-421d-a38c-694af58e6c74 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.189865] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0043964-ed40-4652-8e17-029d70e5968c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.202631] env[63371]: DEBUG nova.compute.provider_tree [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Updating inventory in ProviderTree for provider 
c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1948.255340] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524727a2-e7e0-46f2-8c3f-fddc0efc552c, 'name': SearchDatastore_Task, 'duration_secs': 0.008936} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.255643] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1948.255875] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1948.256121] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1948.256343] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1948.256540] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1948.256799] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab7a8ab3-6fdf-4d29-a8be-26f28f32fad5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.264806] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1948.264967] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1948.265962] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c32525b-1baa-4b39-8559-92c6842cd999 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.270805] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1948.270805] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520ad9e0-5b8d-838e-4f40-8b0f0f23a912" [ 1948.270805] env[63371]: _type = "Task" [ 1948.270805] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.278381] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520ad9e0-5b8d-838e-4f40-8b0f0f23a912, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.724010] env[63371]: ERROR nova.scheduler.client.report [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [req-014eae7b-4aa3-4d7d-925e-cbd8fafd21d0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-014eae7b-4aa3-4d7d-925e-cbd8fafd21d0"}]} [ 1948.739490] env[63371]: DEBUG nova.scheduler.client.report [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1948.752209] env[63371]: DEBUG nova.scheduler.client.report [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1948.752423] env[63371]: DEBUG nova.compute.provider_tree [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1948.762035] env[63371]: DEBUG nova.scheduler.client.report [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1948.780750] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]520ad9e0-5b8d-838e-4f40-8b0f0f23a912, 'name': SearchDatastore_Task, 'duration_secs': 0.007934} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.781521] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-342adf5f-7aae-49d8-8fb6-b9c1b0c3ac8d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.784260] env[63371]: DEBUG nova.scheduler.client.report [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1948.788996] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1948.788996] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52b9f802-7771-2584-3877-03a982de6c68" [ 1948.788996] env[63371]: _type = "Task" [ 1948.788996] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.796496] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b9f802-7771-2584-3877-03a982de6c68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.797248] env[63371]: DEBUG nova.network.neutron [req-f208d041-821b-4f8e-ad15-cd509fcd451c req-520b6474-1dcf-4679-a1b8-84e8c22312c9 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updated VIF entry in instance network info cache for port 14d87d33-0ac4-480f-b86e-c9e13b3e3e4e. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1948.797576] env[63371]: DEBUG nova.network.neutron [req-f208d041-821b-4f8e-ad15-cd509fcd451c req-520b6474-1dcf-4679-a1b8-84e8c22312c9 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updating instance_info_cache with network_info: [{"id": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "address": "fa:16:3e:2e:85:be", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d87d33-0a", "ovs_interfaceid": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1948.871743] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fcaf2b7-7fe8-4159-afe0-8621deb50ba5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.879457] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473bf85e-c76c-44d6-b664-793c7e90e221 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.909039] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b334bb38-939d-465a-a13d-1a0f69aedc81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.915876] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3124a5a0-aafe-4560-8f14-af4655a10aa1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.928548] env[63371]: DEBUG nova.compute.provider_tree [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1949.299149] env[63371]: 
DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52b9f802-7771-2584-3877-03a982de6c68, 'name': SearchDatastore_Task, 'duration_secs': 0.009737} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.299520] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.299658] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 6e9b44fb-153c-4aa8-87ec-04d27ab764ff/6e9b44fb-153c-4aa8-87ec-04d27ab764ff.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1949.299907] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ab4cf7f-12d5-42bc-b709-298a4ef6a633 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.303562] env[63371]: DEBUG oslo_concurrency.lockutils [req-f208d041-821b-4f8e-ad15-cd509fcd451c req-520b6474-1dcf-4679-a1b8-84e8c22312c9 service nova] Releasing lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1949.308617] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1949.308617] env[63371]: value = "task-1775108" [ 1949.308617] env[63371]: _type = "Task" [ 1949.308617] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.315849] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775108, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.460537] env[63371]: DEBUG nova.scheduler.client.report [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 180 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1949.460815] env[63371]: DEBUG nova.compute.provider_tree [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 180 to 181 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1949.460992] env[63371]: DEBUG nova.compute.provider_tree [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1949.818164] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775108, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.42687} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.818428] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 6e9b44fb-153c-4aa8-87ec-04d27ab764ff/6e9b44fb-153c-4aa8-87ec-04d27ab764ff.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1949.818637] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1949.818881] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-746df703-b016-45d0-b162-e2880ffc49a1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.824888] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1949.824888] env[63371]: value = "task-1775110" [ 1949.824888] env[63371]: _type = "Task" [ 1949.824888] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.832727] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775110, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.966044] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.936s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.966630] env[63371]: DEBUG nova.compute.manager [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1950.334980] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775110, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065339} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1950.335371] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1950.336138] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa60707b-651a-4cc7-9ecb-1d062034a4df {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.358672] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 6e9b44fb-153c-4aa8-87ec-04d27ab764ff/6e9b44fb-153c-4aa8-87ec-04d27ab764ff.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1950.358904] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be9b6567-b256-4409-996f-9210ce23b420 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.378895] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1950.378895] env[63371]: value = "task-1775111" [ 1950.378895] env[63371]: _type = "Task" [ 1950.378895] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.386505] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775111, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.472352] env[63371]: DEBUG nova.compute.utils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1950.473984] env[63371]: DEBUG nova.compute.manager [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1950.473984] env[63371]: DEBUG nova.network.neutron [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1950.522513] env[63371]: DEBUG nova.policy [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd08b55f9fa3a45b0a8672e955ee360c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ceecd2a995cf4da0b4218e371065ca0b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1950.777426] env[63371]: DEBUG nova.network.neutron [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Successfully created port: a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1950.890844] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.980023] env[63371]: DEBUG nova.compute.manager [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1951.390871] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775111, 'name': ReconfigVM_Task, 'duration_secs': 0.960732} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.391230] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 6e9b44fb-153c-4aa8-87ec-04d27ab764ff/6e9b44fb-153c-4aa8-87ec-04d27ab764ff.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1951.391827] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bee94a7d-f79a-4a4a-a9f9-fe7c87b9ceef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.398717] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1951.398717] env[63371]: value = "task-1775112" [ 1951.398717] env[63371]: _type = "Task" [ 1951.398717] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.409625] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775112, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.909198] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775112, 'name': Rename_Task, 'duration_secs': 0.130726} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.909457] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1951.909698] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac6f583b-2d47-4760-bf4d-ffad15e43432 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.916374] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1951.916374] env[63371]: value = "task-1775113" [ 1951.916374] env[63371]: _type = "Task" [ 1951.916374] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.924245] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775113, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.987220] env[63371]: DEBUG nova.compute.manager [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1952.013352] env[63371]: DEBUG nova.virt.hardware [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1952.013626] env[63371]: DEBUG nova.virt.hardware [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1952.013784] env[63371]: DEBUG nova.virt.hardware [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1952.014076] env[63371]: DEBUG nova.virt.hardware [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1952.014328] env[63371]: DEBUG nova.virt.hardware [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1952.014495] env[63371]: DEBUG nova.virt.hardware [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1952.014709] env[63371]: DEBUG nova.virt.hardware [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1952.014868] env[63371]: DEBUG nova.virt.hardware [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1952.015119] env[63371]: DEBUG nova.virt.hardware [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1952.015303] env[63371]: DEBUG nova.virt.hardware [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1952.015479] env[63371]: DEBUG nova.virt.hardware [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1952.016345] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7a29ec-9d78-409a-804a-437f08075a79 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.023992] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd2f63b-7414-43f6-9f91-64852318ff42 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.221306] env[63371]: DEBUG nova.compute.manager [req-f029f2c6-1e38-4fab-ae75-5c2f1c7c28a0 req-b253795b-ef83-4324-9a6b-4f5db97f7b64 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Received event network-vif-plugged-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1952.221608] env[63371]: DEBUG oslo_concurrency.lockutils [req-f029f2c6-1e38-4fab-ae75-5c2f1c7c28a0 req-b253795b-ef83-4324-9a6b-4f5db97f7b64 service nova] Acquiring lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.221867] env[63371]: DEBUG oslo_concurrency.lockutils [req-f029f2c6-1e38-4fab-ae75-5c2f1c7c28a0 req-b253795b-ef83-4324-9a6b-4f5db97f7b64 service nova] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.222156] env[63371]: DEBUG oslo_concurrency.lockutils [req-f029f2c6-1e38-4fab-ae75-5c2f1c7c28a0 req-b253795b-ef83-4324-9a6b-4f5db97f7b64 service nova] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.222446] env[63371]: DEBUG 
nova.compute.manager [req-f029f2c6-1e38-4fab-ae75-5c2f1c7c28a0 req-b253795b-ef83-4324-9a6b-4f5db97f7b64 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] No waiting events found dispatching network-vif-plugged-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1952.222749] env[63371]: WARNING nova.compute.manager [req-f029f2c6-1e38-4fab-ae75-5c2f1c7c28a0 req-b253795b-ef83-4324-9a6b-4f5db97f7b64 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Received unexpected event network-vif-plugged-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 for instance with vm_state building and task_state spawning. [ 1952.373038] env[63371]: DEBUG nova.network.neutron [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Successfully updated port: a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1952.427698] env[63371]: DEBUG oslo_vmware.api [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775113, 'name': PowerOnVM_Task, 'duration_secs': 0.439126} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.428072] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1952.428229] env[63371]: INFO nova.compute.manager [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Took 8.23 seconds to spawn the instance on the hypervisor. 
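The DEBUG lines above (SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) all come from the same pattern: the driver submits a vCenter task and then polls it until it reports success, which is what produces the repeated "progress is N%." and "completed successfully." messages. Below is a minimal, self-contained sketch of that polling loop, not the actual oslo.vmware implementation; fetch_task_info and TaskFailed are hypothetical stand-ins introduced only for illustration.

    import time

    class TaskFailed(Exception):
        """Hypothetical error type for a vCenter-style task that ends in an error state."""

    def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
        """Poll a task until it succeeds, fails, or times out.

        fetch_task_info is any callable returning a dict such as
        {'state': 'running', 'progress': 40} or {'state': 'success', 'duration_secs': 0.4}.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info()
            state = info.get('state')
            if state == 'success':
                # corresponds to the "... completed successfully." log lines
                return info
            if state == 'error':
                raise TaskFailed(info.get('error', 'unknown error'))
            # corresponds to the "... progress is N%." log lines
            print("progress is %s%%" % info.get('progress', 0))
            time.sleep(poll_interval)
        raise TimeoutError("task did not complete within %.0f seconds" % timeout)

In the log, one such wait runs per vCenter task in the spawn sequence for instance 6e9b44fb-153c-4aa8-87ec-04d27ab764ff: copy the cached image VMDK, extend the root disk, reconfigure the VM to attach the disk, rename the VM, and power it on, after which the compute manager records the 8.23-second spawn above.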
[ 1952.428458] env[63371]: DEBUG nova.compute.manager [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1952.429326] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f175c5b-4590-4c67-91c6-c6d0949ac6d6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.699716] env[63371]: DEBUG oslo_concurrency.lockutils [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.700016] env[63371]: DEBUG oslo_concurrency.lockutils [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.700316] env[63371]: DEBUG oslo_concurrency.lockutils [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1952.700510] env[63371]: DEBUG oslo_concurrency.lockutils [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1952.700682] env[63371]: DEBUG oslo_concurrency.lockutils [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1952.702779] env[63371]: INFO nova.compute.manager [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Terminating instance [ 1952.704664] env[63371]: DEBUG nova.compute.manager [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Start destroying the instance on the 
hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1952.704857] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1952.705720] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f1a4ec-94e8-45c9-bde9-f19fa39a6790 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.713544] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1952.713763] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01e63a0e-bb8f-4e3b-9dfa-c1d3595863cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.719254] env[63371]: DEBUG oslo_vmware.api [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1952.719254] env[63371]: value = "task-1775114" [ 1952.719254] env[63371]: _type = "Task" [ 1952.719254] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.727321] env[63371]: DEBUG oslo_vmware.api [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775114, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.876252] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1952.876422] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1952.876599] env[63371]: DEBUG nova.network.neutron [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1952.950039] env[63371]: INFO nova.compute.manager [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Took 13.03 seconds to build instance. [ 1953.172932] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "0a174705-f4ec-407c-b7ea-0945d5db46cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.173295] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "0a174705-f4ec-407c-b7ea-0945d5db46cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.228908] env[63371]: DEBUG oslo_vmware.api [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775114, 'name': PowerOffVM_Task, 'duration_secs': 0.304464} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.229187] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1953.229375] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1953.229626] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-46a9af54-1add-481d-8290-f22f4e1b07ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.314223] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1953.314437] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1953.314614] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Deleting the datastore file [datastore1] 1ec21edd-7b7c-4a2b-983f-8aa6c022e033 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1953.314877] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-486891b8-958b-45a8-978c-a2d8621c4f2a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.321176] env[63371]: DEBUG oslo_vmware.api [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1953.321176] env[63371]: value = "task-1775116" [ 1953.321176] env[63371]: _type = "Task" [ 1953.321176] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.323525] env[63371]: DEBUG nova.compute.manager [req-e4e08c83-02cb-4412-a094-d841b9d476ee req-0e541a58-7e3c-40bc-9db3-cdb17a2976a8 service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Received event network-changed-6fc15567-65bf-42ad-9a0a-1b1cee20b40b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1953.323525] env[63371]: DEBUG nova.compute.manager [req-e4e08c83-02cb-4412-a094-d841b9d476ee req-0e541a58-7e3c-40bc-9db3-cdb17a2976a8 service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Refreshing instance network info cache due to event network-changed-6fc15567-65bf-42ad-9a0a-1b1cee20b40b. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1953.323718] env[63371]: DEBUG oslo_concurrency.lockutils [req-e4e08c83-02cb-4412-a094-d841b9d476ee req-0e541a58-7e3c-40bc-9db3-cdb17a2976a8 service nova] Acquiring lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1953.323844] env[63371]: DEBUG oslo_concurrency.lockutils [req-e4e08c83-02cb-4412-a094-d841b9d476ee req-0e541a58-7e3c-40bc-9db3-cdb17a2976a8 service nova] Acquired lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1953.324053] env[63371]: DEBUG nova.network.neutron [req-e4e08c83-02cb-4412-a094-d841b9d476ee req-0e541a58-7e3c-40bc-9db3-cdb17a2976a8 service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Refreshing network info cache for port 6fc15567-65bf-42ad-9a0a-1b1cee20b40b {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1953.333755] env[63371]: DEBUG oslo_vmware.api [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775116, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.410778] env[63371]: DEBUG nova.network.neutron [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1953.452103] env[63371]: DEBUG oslo_concurrency.lockutils [None req-ef718a00-8686-4ef1-bb58-eb67a4200d42 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.538s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.547187] env[63371]: DEBUG nova.network.neutron [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updating instance_info_cache with network_info: [{"id": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "address": "fa:16:3e:00:9c:75", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa21caeee-a9", "ovs_interfaceid": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1953.675216] env[63371]: DEBUG nova.compute.manager [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 1953.834776] env[63371]: DEBUG oslo_vmware.api [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775116, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.377212} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.835516] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1953.835618] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1953.835729] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1953.835900] env[63371]: INFO nova.compute.manager [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1953.836159] env[63371]: DEBUG oslo.service.loopingcall [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1953.836377] env[63371]: DEBUG nova.compute.manager [-] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1953.836472] env[63371]: DEBUG nova.network.neutron [-] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1954.050388] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1954.050770] env[63371]: DEBUG nova.compute.manager [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Instance network_info: |[{"id": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "address": "fa:16:3e:00:9c:75", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa21caeee-a9", "ovs_interfaceid": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1954.051384] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:9c:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3a0ddd7d-c321-4187-bdd8-b19044ea2c4a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a21caeee-a9c4-4ead-8c4e-4dc84446b5b4', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1954.064647] env[63371]: DEBUG oslo.service.loopingcall [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1954.064959] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1954.065312] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b0f7b358-0330-4fde-8f38-cb3fefd18138 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.100340] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1954.100340] env[63371]: value = "task-1775117" [ 1954.100340] env[63371]: _type = "Task" [ 1954.100340] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.111294] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775117, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.202203] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1954.202529] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1954.205210] env[63371]: INFO nova.compute.claims [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1954.219530] env[63371]: DEBUG nova.network.neutron [req-e4e08c83-02cb-4412-a094-d841b9d476ee req-0e541a58-7e3c-40bc-9db3-cdb17a2976a8 service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updated VIF entry in instance network info cache for port 6fc15567-65bf-42ad-9a0a-1b1cee20b40b. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1954.219876] env[63371]: DEBUG nova.network.neutron [req-e4e08c83-02cb-4412-a094-d841b9d476ee req-0e541a58-7e3c-40bc-9db3-cdb17a2976a8 service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance_info_cache with network_info: [{"id": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "address": "fa:16:3e:54:78:e9", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc15567-65", "ovs_interfaceid": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.254309] env[63371]: DEBUG nova.compute.manager [req-e138639b-386c-48ac-b3b6-b80aefc12b56 req-4b4c043a-6bcc-4ec3-b153-2bcdbeda4688 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Received event network-changed-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1954.254402] env[63371]: DEBUG nova.compute.manager [req-e138639b-386c-48ac-b3b6-b80aefc12b56 req-4b4c043a-6bcc-4ec3-b153-2bcdbeda4688 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Refreshing instance network info cache due to event network-changed-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1954.254524] env[63371]: DEBUG oslo_concurrency.lockutils [req-e138639b-386c-48ac-b3b6-b80aefc12b56 req-4b4c043a-6bcc-4ec3-b153-2bcdbeda4688 service nova] Acquiring lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1954.254702] env[63371]: DEBUG oslo_concurrency.lockutils [req-e138639b-386c-48ac-b3b6-b80aefc12b56 req-4b4c043a-6bcc-4ec3-b153-2bcdbeda4688 service nova] Acquired lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1954.254910] env[63371]: DEBUG nova.network.neutron [req-e138639b-386c-48ac-b3b6-b80aefc12b56 req-4b4c043a-6bcc-4ec3-b153-2bcdbeda4688 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Refreshing network info cache for port a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1954.611036] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775117, 'name': CreateVM_Task, 'duration_secs': 0.380838} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.611253] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1954.611800] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1954.612016] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1954.612338] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1954.612645] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e72786f2-1af3-49af-91c5-605773731688 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.616733] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1954.616733] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5249cd64-68f2-4e1c-5bc4-2239d5b169bb" [ 1954.616733] env[63371]: _type = "Task" [ 1954.616733] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.623732] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5249cd64-68f2-4e1c-5bc4-2239d5b169bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.722282] env[63371]: DEBUG oslo_concurrency.lockutils [req-e4e08c83-02cb-4412-a094-d841b9d476ee req-0e541a58-7e3c-40bc-9db3-cdb17a2976a8 service nova] Releasing lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1954.725083] env[63371]: DEBUG nova.network.neutron [-] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.950989] env[63371]: DEBUG nova.network.neutron [req-e138639b-386c-48ac-b3b6-b80aefc12b56 req-4b4c043a-6bcc-4ec3-b153-2bcdbeda4688 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updated VIF entry in instance network info cache for port a21caeee-a9c4-4ead-8c4e-4dc84446b5b4. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1954.951958] env[63371]: DEBUG nova.network.neutron [req-e138639b-386c-48ac-b3b6-b80aefc12b56 req-4b4c043a-6bcc-4ec3-b153-2bcdbeda4688 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updating instance_info_cache with network_info: [{"id": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "address": "fa:16:3e:00:9c:75", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa21caeee-a9", "ovs_interfaceid": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1955.130118] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5249cd64-68f2-4e1c-5bc4-2239d5b169bb, 'name': SearchDatastore_Task, 'duration_secs': 0.008828} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.130490] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1955.130786] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1955.131110] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1955.131405] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1955.131743] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1955.132160] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7910c7d-89ca-4b22-8d27-eac818ba4c1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.140724] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1955.140897] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1955.141633] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51c20417-1dc7-472f-beb5-96c9ca197e1a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.146774] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1955.146774] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5277b04c-130b-fafe-ddf5-d71d890ff048" [ 1955.146774] env[63371]: _type = "Task" [ 1955.146774] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.153882] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5277b04c-130b-fafe-ddf5-d71d890ff048, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.227614] env[63371]: INFO nova.compute.manager [-] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Took 1.39 seconds to deallocate network for instance. [ 1955.316678] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353213cd-86f0-4e97-83a0-0940cdbef8c3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.324512] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ae46be-cd9f-43ae-9a11-2640c2ac06bd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.355275] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61eaadc-762c-4de2-96e6-8df16223c741 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.363077] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5527da13-0b7d-490f-a6c5-c8435a9c29d9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.375576] env[63371]: DEBUG nova.compute.provider_tree [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1955.454469] env[63371]: DEBUG oslo_concurrency.lockutils [req-e138639b-386c-48ac-b3b6-b80aefc12b56 req-4b4c043a-6bcc-4ec3-b153-2bcdbeda4688 service nova] Releasing lock 
"refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1955.454681] env[63371]: DEBUG nova.compute.manager [req-e138639b-386c-48ac-b3b6-b80aefc12b56 req-4b4c043a-6bcc-4ec3-b153-2bcdbeda4688 service nova] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Received event network-vif-deleted-f560031e-f701-4309-aead-34a87be57b22 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1955.454858] env[63371]: INFO nova.compute.manager [req-e138639b-386c-48ac-b3b6-b80aefc12b56 req-4b4c043a-6bcc-4ec3-b153-2bcdbeda4688 service nova] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Neutron deleted interface f560031e-f701-4309-aead-34a87be57b22; detaching it from the instance and deleting it from the info cache [ 1955.455068] env[63371]: DEBUG nova.network.neutron [req-e138639b-386c-48ac-b3b6-b80aefc12b56 req-4b4c043a-6bcc-4ec3-b153-2bcdbeda4688 service nova] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1955.657621] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5277b04c-130b-fafe-ddf5-d71d890ff048, 'name': SearchDatastore_Task, 'duration_secs': 0.007881} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.658454] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0617c4c4-e460-45a2-a444-54a0e5946db4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.663566] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1955.663566] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52d7b292-73f7-9364-eb47-e11a2f432398" [ 1955.663566] env[63371]: _type = "Task" [ 1955.663566] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.670803] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d7b292-73f7-9364-eb47-e11a2f432398, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.735543] env[63371]: DEBUG oslo_concurrency.lockutils [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.895502] env[63371]: ERROR nova.scheduler.client.report [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [req-6091e49d-dff0-4542-b033-c990f5d28801] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c079ebb1-2fa2-4df9-bdab-118e305653c1. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6091e49d-dff0-4542-b033-c990f5d28801"}]} [ 1955.910399] env[63371]: DEBUG nova.scheduler.client.report [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Refreshing inventories for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1955.923046] env[63371]: DEBUG nova.scheduler.client.report [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Updating ProviderTree inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1955.923269] env[63371]: DEBUG nova.compute.provider_tree [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1955.933519] env[63371]: DEBUG nova.scheduler.client.report [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e 
tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Refreshing aggregate associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, aggregates: None {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1955.957723] env[63371]: DEBUG nova.scheduler.client.report [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Refreshing trait associations for resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1, traits: HW_ARCH_X86_64,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE {{(pid=63371) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1955.960521] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4ff576bd-bab5-4d8a-a744-0f0bdca5a67a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.969556] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13b1692-ce74-410e-b31a-282e62b499a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.000154] env[63371]: DEBUG nova.compute.manager [req-e138639b-386c-48ac-b3b6-b80aefc12b56 req-4b4c043a-6bcc-4ec3-b153-2bcdbeda4688 service nova] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Detach interface failed, port_id=f560031e-f701-4309-aead-34a87be57b22, reason: Instance 1ec21edd-7b7c-4a2b-983f-8aa6c022e033 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 1956.060245] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad180b65-658e-44cb-bd30-e677e4254639 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.067362] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11bdfc5e-5c5a-4f17-93ed-2357d51bc646 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.099316] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77a8001-0611-4f9e-b04d-90a70d68b1b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.106329] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e622525-5643-4c7f-b9e9-06008897ab80 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.119186] env[63371]: DEBUG nova.compute.provider_tree [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1956.172996] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52d7b292-73f7-9364-eb47-e11a2f432398, 'name': SearchDatastore_Task, 'duration_secs': 0.010133} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.173265] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.173519] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3d2dabd1-5c4f-4997-843c-e1e124b687ba/3d2dabd1-5c4f-4997-843c-e1e124b687ba.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1956.173755] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46658cac-433b-47fb-abd6-3705667a580e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.180178] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1956.180178] env[63371]: value = "task-1775118" [ 1956.180178] env[63371]: _type = "Task" [ 1956.180178] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.187305] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775118, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.651741] env[63371]: DEBUG nova.scheduler.client.report [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 182 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1956.652041] env[63371]: DEBUG nova.compute.provider_tree [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 182 to 183 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1956.652235] env[63371]: DEBUG nova.compute.provider_tree [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 164, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1956.691086] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775118, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.157277] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.955s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.157671] env[63371]: DEBUG nova.compute.manager [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 1957.160867] env[63371]: DEBUG oslo_concurrency.lockutils [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.425s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.160985] env[63371]: DEBUG nova.objects.instance [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lazy-loading 'resources' on Instance uuid 1ec21edd-7b7c-4a2b-983f-8aa6c022e033 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1957.190504] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775118, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.592325} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.191234] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 3d2dabd1-5c4f-4997-843c-e1e124b687ba/3d2dabd1-5c4f-4997-843c-e1e124b687ba.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1957.191442] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1957.191677] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9368cb03-f2c2-4fdb-a9fd-acf9c47f2ac1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.197829] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1957.197829] env[63371]: value = "task-1775119" [ 1957.197829] env[63371]: _type = "Task" [ 1957.197829] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.205258] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775119, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.664432] env[63371]: DEBUG nova.compute.utils [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1957.669447] env[63371]: DEBUG nova.compute.manager [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Allocating IP information in the background. {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 1957.669447] env[63371]: DEBUG nova.network.neutron [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1957.708356] env[63371]: DEBUG nova.policy [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2ef97c1a9a174c1888972e6f281eecbe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2601d597b4d64481ace490d56d1056a6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 1957.712640] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775119, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05807} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.713160] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1957.715834] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44acea3d-5aa7-4444-9e2a-02e66287cad4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.739146] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 3d2dabd1-5c4f-4997-843c-e1e124b687ba/3d2dabd1-5c4f-4997-843c-e1e124b687ba.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1957.742985] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f755db57-4881-4f17-a857-3398abed8fc0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.763573] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1957.763573] env[63371]: value = "task-1775120" [ 1957.763573] env[63371]: _type = "Task" [ 1957.763573] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.774093] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775120, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.801333] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8233df-ae17-46bd-af41-5625ee74b9ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.809849] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7deaa856-c2f2-4508-974c-a74d3410b2f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.841502] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff16c71-dd8c-4e5a-b685-c43c35a7dc28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.849852] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7a8f27-ad54-410a-a920-59223a4adc98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.863998] env[63371]: DEBUG nova.compute.provider_tree [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1958.031791] env[63371]: DEBUG nova.network.neutron [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Successfully created port: 429adb0e-c314-4234-9278-025fc3386ec7 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1958.169539] env[63371]: DEBUG nova.compute.manager [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 1958.275499] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775120, 'name': ReconfigVM_Task, 'duration_secs': 0.293438} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.275779] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 3d2dabd1-5c4f-4997-843c-e1e124b687ba/3d2dabd1-5c4f-4997-843c-e1e124b687ba.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1958.276382] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a7c0de74-a793-4f0c-bdf7-4987b79a314c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.283183] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1958.283183] env[63371]: value = "task-1775121" [ 1958.283183] env[63371]: _type = "Task" [ 1958.283183] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.290860] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775121, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.397650] env[63371]: DEBUG nova.scheduler.client.report [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Updated inventory for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with generation 183 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1958.397937] env[63371]: DEBUG nova.compute.provider_tree [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Updating resource provider c079ebb1-2fa2-4df9-bdab-118e305653c1 generation from 183 to 184 during operation: update_inventory {{(pid=63371) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1958.398141] env[63371]: DEBUG nova.compute.provider_tree [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Updating inventory in ProviderTree for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1958.674829] env[63371]: INFO nova.virt.block_device [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Booting with volume 326a81a2-ec8c-4921-b1d8-903c122d6006 at /dev/sda [ 1958.707296] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-48ed2018-40c4-4608-aeb5-b9598a87f2af {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.717893] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3469fc4c-9d1f-4a0c-af53-0dd8ec337e30 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.746544] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4046d158-6661-471b-a55e-ce0b918d9df9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.755147] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885fe194-62fb-47ab-82e5-1a3ddc9eea78 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.783388] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a0d074-2867-492b-baab-0af9871cefcc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.794244] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a24723-2177-4cf2-91d7-944ec1965137 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.796488] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775121, 'name': Rename_Task, 'duration_secs': 0.142852} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.796751] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1958.797254] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b06e6fc-0723-452d-b3cd-37b8608caf43 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.803408] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1958.803408] env[63371]: value = "task-1775122" [ 1958.803408] env[63371]: _type = "Task" [ 1958.803408] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.809583] env[63371]: DEBUG nova.virt.block_device [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating existing volume attachment record: 180948d5-b799-4c0d-9cc5-f2aec8362d37 {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1958.815970] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775122, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.903135] env[63371]: DEBUG oslo_concurrency.lockutils [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.742s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.924167] env[63371]: INFO nova.scheduler.client.report [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Deleted allocations for instance 1ec21edd-7b7c-4a2b-983f-8aa6c022e033 [ 1959.313616] env[63371]: DEBUG oslo_vmware.api [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775122, 'name': PowerOnVM_Task, 'duration_secs': 0.433219} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.313901] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1959.314148] env[63371]: INFO nova.compute.manager [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Took 7.33 seconds to spawn the instance on the hypervisor. 
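The CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task and PowerOnVM_Task entries above all follow the same shape: the compute driver issues a vSphere call through an oslo.vmware session, gets back a task reference, and the recurring "progress is N% / completed successfully" lines come from the library polling that task. A minimal sketch of that pattern outside Nova might look like the following (the vCenter host, credentials and managed-object reference are placeholders, not values taken from this log):

    # Sketch only -- not Nova's code; endpoint, credentials and moref are assumptions.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # task_poll_interval drives the polling cadence behind the "progress is N%" lines.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'administrator@vsphere.local', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # invoke_api() sends the SOAP request (here PowerOnVM_Task) and returns a task
    # reference; wait_for_task() polls it and raises if the task ends in error.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')  # hypothetical moref
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)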
[ 1959.314344] env[63371]: DEBUG nova.compute.manager [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1959.315154] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7239548b-0a5f-4eb3-933d-3985d3c18077 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.418639] env[63371]: DEBUG nova.compute.manager [req-9c9a8a80-4f70-4029-abe3-5e41525bb58b req-2337c598-6eff-4a16-a2a5-97cb7e296067 service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Received event network-vif-plugged-429adb0e-c314-4234-9278-025fc3386ec7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1959.418869] env[63371]: DEBUG oslo_concurrency.lockutils [req-9c9a8a80-4f70-4029-abe3-5e41525bb58b req-2337c598-6eff-4a16-a2a5-97cb7e296067 service nova] Acquiring lock "0a174705-f4ec-407c-b7ea-0945d5db46cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.419082] env[63371]: DEBUG oslo_concurrency.lockutils [req-9c9a8a80-4f70-4029-abe3-5e41525bb58b req-2337c598-6eff-4a16-a2a5-97cb7e296067 service nova] Lock "0a174705-f4ec-407c-b7ea-0945d5db46cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.419260] env[63371]: DEBUG oslo_concurrency.lockutils [req-9c9a8a80-4f70-4029-abe3-5e41525bb58b req-2337c598-6eff-4a16-a2a5-97cb7e296067 service nova] Lock "0a174705-f4ec-407c-b7ea-0945d5db46cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.419427] env[63371]: DEBUG nova.compute.manager [req-9c9a8a80-4f70-4029-abe3-5e41525bb58b req-2337c598-6eff-4a16-a2a5-97cb7e296067 service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] No waiting events found dispatching network-vif-plugged-429adb0e-c314-4234-9278-025fc3386ec7 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1959.419590] env[63371]: WARNING nova.compute.manager [req-9c9a8a80-4f70-4029-abe3-5e41525bb58b req-2337c598-6eff-4a16-a2a5-97cb7e296067 service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Received unexpected event network-vif-plugged-429adb0e-c314-4234-9278-025fc3386ec7 for instance with vm_state building and task_state block_device_mapping. 
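The repeated "Acquiring lock ... / acquired ... :: waited Ns / released ... :: held Ns" triples throughout this trace (for "compute_resources", "refresh_cache-<uuid>" and the per-instance "-events" locks) are emitted by oslo.concurrency's lockutils helpers as code serializes on a named lock. A rough illustration of the two forms in use, with placeholder lock names and empty bodies rather than Nova's real critical sections:

    # Sketch only -- lock names and guarded work are placeholders.
    from oslo_concurrency import lockutils

    # Decorator form: serialize callers on a named lock, in the style of the
    # resource-tracker claims and usage updates logged above.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def update_usage():
        pass  # placeholder for the guarded resource accounting

    # Context-manager form, mirroring the "refresh_cache-<uuid>" and
    # "<uuid>-events" acquire/release pairs in the log.
    def refresh_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # placeholder for rebuilding the instance network info cache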
[ 1959.431065] env[63371]: DEBUG oslo_concurrency.lockutils [None req-46ee9fe2-2e6b-4766-8721-49cb2d9da85e tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "1ec21edd-7b7c-4a2b-983f-8aa6c022e033" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.731s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1959.506452] env[63371]: DEBUG nova.network.neutron [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Successfully updated port: 429adb0e-c314-4234-9278-025fc3386ec7 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1959.835930] env[63371]: INFO nova.compute.manager [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Took 13.34 seconds to build instance. [ 1960.010332] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.010332] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.010332] env[63371]: DEBUG nova.network.neutron [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1960.338091] env[63371]: DEBUG oslo_concurrency.lockutils [None req-f2c0263b-e7b4-48a8-8085-c23fc5c7ae0a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.863s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.347684] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.347933] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.348175] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.348360] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.348820] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.350829] env[63371]: INFO nova.compute.manager [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Terminating instance [ 1960.352680] env[63371]: DEBUG nova.compute.manager [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 1960.352887] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1960.353738] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee86c9f0-c51e-4d29-bd2c-e13a0de8e0c2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.361809] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1960.362011] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6006f678-fc20-438d-9976-9c3065911ac5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.369419] env[63371]: DEBUG oslo_vmware.api [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1960.369419] env[63371]: value = "task-1775123" [ 1960.369419] env[63371]: _type = "Task" [ 1960.369419] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.378549] env[63371]: DEBUG oslo_vmware.api [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775123, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.543721] env[63371]: DEBUG nova.network.neutron [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1960.680300] env[63371]: DEBUG nova.network.neutron [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance_info_cache with network_info: [{"id": "429adb0e-c314-4234-9278-025fc3386ec7", "address": "fa:16:3e:7c:10:0e", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap429adb0e-c3", "ovs_interfaceid": "429adb0e-c314-4234-9278-025fc3386ec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.880110] env[63371]: DEBUG oslo_vmware.api [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775123, 'name': PowerOffVM_Task, 'duration_secs': 0.251373} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.880579] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1960.880579] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1960.880741] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94303976-4dad-4e5a-a242-ba6c2ad5599e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.892546] env[63371]: DEBUG nova.compute.manager [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 1960.893049] env[63371]: DEBUG nova.virt.hardware [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1960.893267] env[63371]: DEBUG nova.virt.hardware [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1960.893420] env[63371]: DEBUG nova.virt.hardware [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1960.893599] env[63371]: DEBUG nova.virt.hardware [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1960.893742] env[63371]: DEBUG nova.virt.hardware [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1960.893972] env[63371]: DEBUG nova.virt.hardware [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1960.894119] env[63371]: DEBUG nova.virt.hardware [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1960.894292] env[63371]: DEBUG nova.virt.hardware [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1960.894458] env[63371]: DEBUG nova.virt.hardware [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Got 1 possible topologies 
{{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1960.894617] env[63371]: DEBUG nova.virt.hardware [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1960.894785] env[63371]: DEBUG nova.virt.hardware [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1960.895611] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e210eae-f637-4d8b-85c0-5aea114c8ef9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.904727] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e5ee21-03f8-40f8-bc2b-58b1d1564c77 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.937117] env[63371]: DEBUG nova.compute.manager [req-101cea22-0fa4-485d-8bf1-96d5e8401349 req-0cc09370-7f4f-4678-8b97-5844ebc1002b service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Received event network-changed-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1960.937466] env[63371]: DEBUG nova.compute.manager [req-101cea22-0fa4-485d-8bf1-96d5e8401349 req-0cc09370-7f4f-4678-8b97-5844ebc1002b service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Refreshing instance network info cache due to event network-changed-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1960.937822] env[63371]: DEBUG oslo_concurrency.lockutils [req-101cea22-0fa4-485d-8bf1-96d5e8401349 req-0cc09370-7f4f-4678-8b97-5844ebc1002b service nova] Acquiring lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.938116] env[63371]: DEBUG oslo_concurrency.lockutils [req-101cea22-0fa4-485d-8bf1-96d5e8401349 req-0cc09370-7f4f-4678-8b97-5844ebc1002b service nova] Acquired lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.938416] env[63371]: DEBUG nova.network.neutron [req-101cea22-0fa4-485d-8bf1-96d5e8401349 req-0cc09370-7f4f-4678-8b97-5844ebc1002b service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Refreshing network info cache for port a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1960.961692] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1960.961955] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1960.962126] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Deleting the datastore file [datastore1] 0518c5a8-8cc1-4829-a0cf-5f5904f6df86 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1960.962400] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-523348db-343b-4c56-9110-f07b56bd3360 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.969681] env[63371]: DEBUG oslo_vmware.api [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for the task: (returnval){ [ 1960.969681] env[63371]: value = "task-1775125" [ 1960.969681] env[63371]: _type = "Task" [ 1960.969681] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.978327] env[63371]: DEBUG oslo_vmware.api [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775125, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.183677] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1961.184138] env[63371]: DEBUG nova.compute.manager [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Instance network_info: |[{"id": "429adb0e-c314-4234-9278-025fc3386ec7", "address": "fa:16:3e:7c:10:0e", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap429adb0e-c3", "ovs_interfaceid": "429adb0e-c314-4234-9278-025fc3386ec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 1961.184649] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:10:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6fb0104-186b-4288-b87e-634893f46f01', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '429adb0e-c314-4234-9278-025fc3386ec7', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1961.192632] env[63371]: DEBUG oslo.service.loopingcall [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1961.192887] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1961.193161] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78cf18c1-083a-4706-9a79-ccb17905d246 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.214546] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1961.214546] env[63371]: value = "task-1775126" [ 1961.214546] env[63371]: _type = "Task" [ 1961.214546] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.222451] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775126, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.449592] env[63371]: DEBUG nova.compute.manager [req-54caa2eb-29ad-45f5-a366-54e0000781e0 req-df273f79-bb0a-46a8-8f20-858ad4a165df service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Received event network-changed-429adb0e-c314-4234-9278-025fc3386ec7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1961.449815] env[63371]: DEBUG nova.compute.manager [req-54caa2eb-29ad-45f5-a366-54e0000781e0 req-df273f79-bb0a-46a8-8f20-858ad4a165df service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Refreshing instance network info cache due to event network-changed-429adb0e-c314-4234-9278-025fc3386ec7. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1961.450055] env[63371]: DEBUG oslo_concurrency.lockutils [req-54caa2eb-29ad-45f5-a366-54e0000781e0 req-df273f79-bb0a-46a8-8f20-858ad4a165df service nova] Acquiring lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1961.450261] env[63371]: DEBUG oslo_concurrency.lockutils [req-54caa2eb-29ad-45f5-a366-54e0000781e0 req-df273f79-bb0a-46a8-8f20-858ad4a165df service nova] Acquired lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1961.450433] env[63371]: DEBUG nova.network.neutron [req-54caa2eb-29ad-45f5-a366-54e0000781e0 req-df273f79-bb0a-46a8-8f20-858ad4a165df service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Refreshing network info cache for port 429adb0e-c314-4234-9278-025fc3386ec7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1961.480613] env[63371]: DEBUG oslo_vmware.api [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Task: {'id': task-1775125, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.442903} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.483108] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1961.483308] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1961.483486] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1961.483658] env[63371]: INFO nova.compute.manager [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1961.483893] env[63371]: DEBUG oslo.service.loopingcall [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1961.484445] env[63371]: DEBUG nova.compute.manager [-] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 1961.484545] env[63371]: DEBUG nova.network.neutron [-] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1961.725486] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775126, 'name': CreateVM_Task, 'duration_secs': 0.374098} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.725651] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1961.726323] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'disk_bus': None, 'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368516', 'volume_id': '326a81a2-ec8c-4921-b1d8-903c122d6006', 'name': 'volume-326a81a2-ec8c-4921-b1d8-903c122d6006', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0a174705-f4ec-407c-b7ea-0945d5db46cf', 'attached_at': '', 'detached_at': '', 'volume_id': '326a81a2-ec8c-4921-b1d8-903c122d6006', 'serial': '326a81a2-ec8c-4921-b1d8-903c122d6006'}, 'boot_index': 0, 'device_type': None, 'attachment_id': '180948d5-b799-4c0d-9cc5-f2aec8362d37', 'mount_device': '/dev/sda', 'volume_type': None}], 'swap': None} {{(pid=63371) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1961.726537] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Root volume attach. Driver type: vmdk {{(pid=63371) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1961.727325] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44202049-3a8c-4044-b9c6-3737a27b7653 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.738408] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a390c966-9d9a-459b-91e1-7dab53238164 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.745863] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83d3116d-8898-44d1-a413-716fff71049d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.752594] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-4a1c4f2f-4bcd-4f50-b32e-6330372adc03 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.760280] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1961.760280] env[63371]: value = "task-1775127" [ 1961.760280] env[63371]: _type = "Task" [ 1961.760280] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.770864] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775127, 'name': RelocateVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.803354] env[63371]: DEBUG nova.network.neutron [req-101cea22-0fa4-485d-8bf1-96d5e8401349 req-0cc09370-7f4f-4678-8b97-5844ebc1002b service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updated VIF entry in instance network info cache for port a21caeee-a9c4-4ead-8c4e-4dc84446b5b4. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1961.803866] env[63371]: DEBUG nova.network.neutron [req-101cea22-0fa4-485d-8bf1-96d5e8401349 req-0cc09370-7f4f-4678-8b97-5844ebc1002b service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updating instance_info_cache with network_info: [{"id": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "address": "fa:16:3e:00:9c:75", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa21caeee-a9", "ovs_interfaceid": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1962.271631] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775127, 'name': RelocateVM_Task, 'duration_secs': 0.351432} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.271960] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Volume attach. 
Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1962.272137] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368516', 'volume_id': '326a81a2-ec8c-4921-b1d8-903c122d6006', 'name': 'volume-326a81a2-ec8c-4921-b1d8-903c122d6006', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0a174705-f4ec-407c-b7ea-0945d5db46cf', 'attached_at': '', 'detached_at': '', 'volume_id': '326a81a2-ec8c-4921-b1d8-903c122d6006', 'serial': '326a81a2-ec8c-4921-b1d8-903c122d6006'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1962.272889] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-713ae98a-6048-43b9-b4e0-be4cfe3df91a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.289262] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9e1142-f0c1-4d2e-9f3d-53bda1fdba7c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.312051] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] volume-326a81a2-ec8c-4921-b1d8-903c122d6006/volume-326a81a2-ec8c-4921-b1d8-903c122d6006.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1962.313328] env[63371]: DEBUG nova.network.neutron [req-54caa2eb-29ad-45f5-a366-54e0000781e0 req-df273f79-bb0a-46a8-8f20-858ad4a165df service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updated VIF entry in instance network info cache for port 429adb0e-c314-4234-9278-025fc3386ec7. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1962.313328] env[63371]: DEBUG nova.network.neutron [req-54caa2eb-29ad-45f5-a366-54e0000781e0 req-df273f79-bb0a-46a8-8f20-858ad4a165df service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance_info_cache with network_info: [{"id": "429adb0e-c314-4234-9278-025fc3386ec7", "address": "fa:16:3e:7c:10:0e", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap429adb0e-c3", "ovs_interfaceid": "429adb0e-c314-4234-9278-025fc3386ec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1962.315672] env[63371]: DEBUG oslo_concurrency.lockutils [req-101cea22-0fa4-485d-8bf1-96d5e8401349 req-0cc09370-7f4f-4678-8b97-5844ebc1002b service nova] Releasing lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1962.315953] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6fd1138-f0a4-4e97-b622-1cd7668178e7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.338150] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1962.338150] env[63371]: value = "task-1775128" [ 1962.338150] env[63371]: _type = "Task" [ 1962.338150] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.346522] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775128, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.558991] env[63371]: DEBUG nova.network.neutron [-] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1962.816049] env[63371]: DEBUG oslo_concurrency.lockutils [req-54caa2eb-29ad-45f5-a366-54e0000781e0 req-df273f79-bb0a-46a8-8f20-858ad4a165df service nova] Releasing lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1962.849331] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.062635] env[63371]: INFO nova.compute.manager [-] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Took 1.58 seconds to deallocate network for instance. [ 1963.349885] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775128, 'name': ReconfigVM_Task, 'duration_secs': 0.772408} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.350230] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Reconfigured VM instance instance-0000007a to attach disk [datastore1] volume-326a81a2-ec8c-4921-b1d8-903c122d6006/volume-326a81a2-ec8c-4921-b1d8-903c122d6006.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1963.355125] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-20f9fccd-20a7-4dc5-a41d-c4c04f5f0625 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.370775] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1963.370775] env[63371]: value = "task-1775129" [ 1963.370775] env[63371]: _type = "Task" [ 1963.370775] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.380182] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775129, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.481103] env[63371]: DEBUG nova.compute.manager [req-cb5b557d-09c4-4436-bd30-046a571a91c8 req-25c216ca-e4fa-4288-a755-fb73e0a35056 service nova] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Received event network-vif-deleted-e4ee0c90-4a70-4f4e-b976-34412c13da2f {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1963.570394] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.570671] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.570884] env[63371]: DEBUG nova.objects.instance [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lazy-loading 'resources' on Instance uuid 0518c5a8-8cc1-4829-a0cf-5f5904f6df86 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1963.888625] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775129, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.171604] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df56462c-5f5c-4a74-9d71-b7d04a8f2d9a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.179575] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b2b7e3-efd4-4bee-96e9-91af1aa3ce52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.209067] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fa6e5b0-3237-4395-9652-7f67c0403c71 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.216807] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2d93af-2640-4aa5-8a53-49baceadea38 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.231187] env[63371]: DEBUG nova.compute.provider_tree [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1964.381457] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775129, 'name': ReconfigVM_Task, 'duration_secs': 0.80492} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.381813] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368516', 'volume_id': '326a81a2-ec8c-4921-b1d8-903c122d6006', 'name': 'volume-326a81a2-ec8c-4921-b1d8-903c122d6006', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '0a174705-f4ec-407c-b7ea-0945d5db46cf', 'attached_at': '', 'detached_at': '', 'volume_id': '326a81a2-ec8c-4921-b1d8-903c122d6006', 'serial': '326a81a2-ec8c-4921-b1d8-903c122d6006'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1964.382297] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1748ddb3-6f31-4a69-9631-4ec4a48c266d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.388922] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1964.388922] env[63371]: value = "task-1775130" [ 1964.388922] env[63371]: _type = "Task" [ 1964.388922] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.396374] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775130, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.735278] env[63371]: DEBUG nova.scheduler.client.report [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1964.899321] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775130, 'name': Rename_Task, 'duration_secs': 0.347085} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.899598] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1964.899848] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4fa1e51f-e0b6-48db-8d13-eb8f539016a3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.906982] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1964.906982] env[63371]: value = "task-1775131" [ 1964.906982] env[63371]: _type = "Task" [ 1964.906982] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.915519] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775131, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.240073] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.669s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1965.265776] env[63371]: INFO nova.scheduler.client.report [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Deleted allocations for instance 0518c5a8-8cc1-4829-a0cf-5f5904f6df86 [ 1965.419924] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775131, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.776581] env[63371]: DEBUG oslo_concurrency.lockutils [None req-d0d866c7-dcf0-4803-8724-4b3e06354ed8 tempest-ServerRescueNegativeTestJSON-957811564 tempest-ServerRescueNegativeTestJSON-957811564-project-member] Lock "0518c5a8-8cc1-4829-a0cf-5f5904f6df86" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.428s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1965.919357] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775131, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.419722] env[63371]: DEBUG oslo_vmware.api [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775131, 'name': PowerOnVM_Task, 'duration_secs': 1.21481} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.419984] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1966.420260] env[63371]: INFO nova.compute.manager [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Took 5.53 seconds to spawn the instance on the hypervisor. 
[ 1966.420438] env[63371]: DEBUG nova.compute.manager [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 1966.421361] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02735523-6e50-4e7f-91b0-a990fa2949d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.939870] env[63371]: INFO nova.compute.manager [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Took 12.76 seconds to build instance. [ 1967.442283] env[63371]: DEBUG oslo_concurrency.lockutils [None req-47af5a82-e6c6-405b-99ca-b99f809ecd7e tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "0a174705-f4ec-407c-b7ea-0945d5db46cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.269s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.566184] env[63371]: DEBUG nova.compute.manager [req-19b84e08-379f-4085-94f9-5e6aab41f02d req-6c22f821-5823-4365-89c5-ca5530ace73f service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Received event network-changed-a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1967.566415] env[63371]: DEBUG nova.compute.manager [req-19b84e08-379f-4085-94f9-5e6aab41f02d req-6c22f821-5823-4365-89c5-ca5530ace73f service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Refreshing instance network info cache due to event network-changed-a7788c55-6aa0-4056-b8d1-cff8ad8951f7. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1967.566648] env[63371]: DEBUG oslo_concurrency.lockutils [req-19b84e08-379f-4085-94f9-5e6aab41f02d req-6c22f821-5823-4365-89c5-ca5530ace73f service nova] Acquiring lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1967.566794] env[63371]: DEBUG oslo_concurrency.lockutils [req-19b84e08-379f-4085-94f9-5e6aab41f02d req-6c22f821-5823-4365-89c5-ca5530ace73f service nova] Acquired lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1967.566958] env[63371]: DEBUG nova.network.neutron [req-19b84e08-379f-4085-94f9-5e6aab41f02d req-6c22f821-5823-4365-89c5-ca5530ace73f service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Refreshing network info cache for port a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1968.282674] env[63371]: DEBUG nova.network.neutron [req-19b84e08-379f-4085-94f9-5e6aab41f02d req-6c22f821-5823-4365-89c5-ca5530ace73f service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updated VIF entry in instance network info cache for port a7788c55-6aa0-4056-b8d1-cff8ad8951f7. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1968.283070] env[63371]: DEBUG nova.network.neutron [req-19b84e08-379f-4085-94f9-5e6aab41f02d req-6c22f821-5823-4365-89c5-ca5530ace73f service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updating instance_info_cache with network_info: [{"id": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "address": "fa:16:3e:8d:c6:ca", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7788c55-6a", "ovs_interfaceid": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.786304] env[63371]: DEBUG oslo_concurrency.lockutils [req-19b84e08-379f-4085-94f9-5e6aab41f02d req-6c22f821-5823-4365-89c5-ca5530ace73f service nova] Releasing lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1969.177144] env[63371]: DEBUG nova.compute.manager [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Stashing vm_state: active {{(pid=63371) _prep_resize /opt/stack/nova/nova/compute/manager.py:5625}} [ 1969.595778] env[63371]: DEBUG nova.compute.manager [req-f55529a9-17bb-45de-85cc-92fbc38ffd48 req-cc89af55-28c8-4d89-8720-0497d3d2764e service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Received event network-changed-429adb0e-c314-4234-9278-025fc3386ec7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 1969.595994] env[63371]: DEBUG nova.compute.manager [req-f55529a9-17bb-45de-85cc-92fbc38ffd48 req-cc89af55-28c8-4d89-8720-0497d3d2764e service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Refreshing instance network info cache due to event network-changed-429adb0e-c314-4234-9278-025fc3386ec7. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 1969.596223] env[63371]: DEBUG oslo_concurrency.lockutils [req-f55529a9-17bb-45de-85cc-92fbc38ffd48 req-cc89af55-28c8-4d89-8720-0497d3d2764e service nova] Acquiring lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1969.596355] env[63371]: DEBUG oslo_concurrency.lockutils [req-f55529a9-17bb-45de-85cc-92fbc38ffd48 req-cc89af55-28c8-4d89-8720-0497d3d2764e service nova] Acquired lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1969.596517] env[63371]: DEBUG nova.network.neutron [req-f55529a9-17bb-45de-85cc-92fbc38ffd48 req-cc89af55-28c8-4d89-8720-0497d3d2764e service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Refreshing network info cache for port 429adb0e-c314-4234-9278-025fc3386ec7 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1969.697112] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.697398] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1970.202658] env[63371]: INFO nova.compute.claims [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1970.317709] env[63371]: DEBUG nova.network.neutron [req-f55529a9-17bb-45de-85cc-92fbc38ffd48 req-cc89af55-28c8-4d89-8720-0497d3d2764e service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updated VIF entry in instance network info cache for port 429adb0e-c314-4234-9278-025fc3386ec7. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1970.318122] env[63371]: DEBUG nova.network.neutron [req-f55529a9-17bb-45de-85cc-92fbc38ffd48 req-cc89af55-28c8-4d89-8720-0497d3d2764e service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance_info_cache with network_info: [{"id": "429adb0e-c314-4234-9278-025fc3386ec7", "address": "fa:16:3e:7c:10:0e", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap429adb0e-c3", "ovs_interfaceid": "429adb0e-c314-4234-9278-025fc3386ec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.710822] env[63371]: INFO nova.compute.resource_tracker [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating resource usage from migration 0af0bbaf-03e7-4421-a1d3-0e2f3ab8aae2 [ 1970.794098] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07101d93-b824-4d39-a7a1-c2463c65dd64 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.802676] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12972b31-e395-45eb-8c5c-652c37a545c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.831345] env[63371]: DEBUG oslo_concurrency.lockutils [req-f55529a9-17bb-45de-85cc-92fbc38ffd48 req-cc89af55-28c8-4d89-8720-0497d3d2764e service nova] Releasing lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1970.832390] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b13227-e0d5-400b-ac96-e7904f766f27 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.839458] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883d66d6-04a5-4598-9f99-b9752b900ac0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.854071] env[63371]: DEBUG nova.compute.provider_tree [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 
tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1971.357074] env[63371]: DEBUG nova.scheduler.client.report [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1971.862922] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.165s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.863154] env[63371]: INFO nova.compute.manager [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Migrating [ 1972.377504] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1972.377730] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1972.377903] env[63371]: DEBUG nova.network.neutron [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1973.087860] env[63371]: DEBUG nova.network.neutron [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance_info_cache with network_info: [{"id": "429adb0e-c314-4234-9278-025fc3386ec7", "address": "fa:16:3e:7c:10:0e", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap429adb0e-c3", "ovs_interfaceid": "429adb0e-c314-4234-9278-025fc3386ec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.591142] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.107066] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4769c7e-5955-4bc3-959f-8788f6dfda0d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.127245] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance '0a174705-f4ec-407c-b7ea-0945d5db46cf' progress to 0 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1975.633591] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1975.634145] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-35d81c09-9240-4b52-90f9-0410caa89b0f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.643506] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1975.643506] env[63371]: value = "task-1775132" [ 1975.643506] env[63371]: _type = "Task" [ 1975.643506] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.653180] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775132, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.153269] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775132, 'name': PowerOffVM_Task, 'duration_secs': 0.21356} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.153712] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1976.153712] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance '0a174705-f4ec-407c-b7ea-0945d5db46cf' progress to 17 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1976.659764] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1976.660032] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1976.660177] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1976.660364] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1976.660508] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1976.660652] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 
tempest-ServerActionsTestOtherA-1526347176-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1976.660848] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1976.661012] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1976.661189] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1976.661354] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1976.661526] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1976.666733] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91142432-d89c-4f10-bd92-a8b910c97da0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.683910] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1976.683910] env[63371]: value = "task-1775133" [ 1976.683910] env[63371]: _type = "Task" [ 1976.683910] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.695203] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775133, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.194180] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775133, 'name': ReconfigVM_Task, 'duration_secs': 0.16328} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.194573] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance '0a174705-f4ec-407c-b7ea-0945d5db46cf' progress to 33 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1977.700650] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1977.700954] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1977.701134] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1977.701343] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1977.701486] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1977.701644] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1977.701851] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1977.702035] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 
tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1977.702260] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1977.702436] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1977.702626] env[63371]: DEBUG nova.virt.hardware [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1977.708135] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1977.708473] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37173052-41f5-4c96-b3b0-502070f3ce74 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.729880] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1977.729880] env[63371]: value = "task-1775134" [ 1977.729880] env[63371]: _type = "Task" [ 1977.729880] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.739207] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775134, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.240107] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775134, 'name': ReconfigVM_Task, 'duration_secs': 0.170393} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.240477] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1978.241131] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f6acbf-b58c-4bcc-a0f0-386facf4a6bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.262722] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] volume-326a81a2-ec8c-4921-b1d8-903c122d6006/volume-326a81a2-ec8c-4921-b1d8-903c122d6006.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1978.263284] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-82772a22-2be6-4778-b0a7-6aea96f8a055 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.280974] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1978.280974] env[63371]: value = "task-1775135" [ 1978.280974] env[63371]: _type = "Task" [ 1978.280974] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.288523] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775135, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.790273] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775135, 'name': ReconfigVM_Task, 'duration_secs': 0.267302} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.790547] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Reconfigured VM instance instance-0000007a to attach disk [datastore1] volume-326a81a2-ec8c-4921-b1d8-903c122d6006/volume-326a81a2-ec8c-4921-b1d8-903c122d6006.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1978.790808] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance '0a174705-f4ec-407c-b7ea-0945d5db46cf' progress to 50 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1979.297910] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e2cf47-4f62-4b6b-9a06-51899310a2f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.318265] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ddc423-2c77-4752-876f-32bc85d0d64c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.336433] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance '0a174705-f4ec-407c-b7ea-0945d5db46cf' progress to 67 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1979.813472] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1979.813711] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1980.319919] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1980.320391] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 1980.851598] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.851752] env[63371]: DEBUG oslo_concurrency.lockutils [None 
req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquired lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.851828] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Forcefully refreshing network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1980.971550] env[63371]: DEBUG nova.network.neutron [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Port 429adb0e-c314-4234-9278-025fc3386ec7 binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1981.994589] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "0a174705-f4ec-407c-b7ea-0945d5db46cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1981.994589] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "0a174705-f4ec-407c-b7ea-0945d5db46cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1981.994938] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "0a174705-f4ec-407c-b7ea-0945d5db46cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.053133] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updating instance_info_cache with network_info: [{"id": "a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "address": "fa:16:3e:8d:c6:ca", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7788c55-6a", "ovs_interfaceid": 
"a7788c55-6aa0-4056-b8d1-cff8ad8951f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.555833] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Releasing lock "refresh_cache-6df9af10-0053-4696-920a-10ab2af67ef5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1982.556219] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updated the network info_cache for instance {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 1982.556340] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1982.556404] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1982.556540] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1982.556685] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1982.556823] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1982.556962] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1982.557104] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 1982.557248] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1983.029834] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1983.030065] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1983.030265] env[63371]: DEBUG nova.network.neutron [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1983.060244] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.060469] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.060624] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.060773] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1983.061730] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7413d183-4b8f-43ec-9fbe-5e100b753ab0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.070048] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466d6895-3d96-46f4-aad5-adb79e518fce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.084656] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-360af73b-6742-491f-867f-ddc8d23ccc2f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.091419] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2eb436-e1a4-4aab-8a13-1ad687a9a66a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.120442] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180541MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1983.120590] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.120852] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.754406] env[63371]: DEBUG nova.network.neutron [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance_info_cache with network_info: [{"id": "429adb0e-c314-4234-9278-025fc3386ec7", "address": "fa:16:3e:7c:10:0e", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap429adb0e-c3", "ovs_interfaceid": "429adb0e-c314-4234-9278-025fc3386ec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1984.131326] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Applying migration context for instance 0a174705-f4ec-407c-b7ea-0945d5db46cf as it has an incoming, in-progress migration 0af0bbaf-03e7-4421-a1d3-0e2f3ab8aae2. 
Migration status is post-migrating {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1984.132309] env[63371]: INFO nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating resource usage from migration 0af0bbaf-03e7-4421-a1d3-0e2f3ab8aae2 [ 1984.152273] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 6df9af10-0053-4696-920a-10ab2af67ef5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.152440] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 29791f6c-edec-44b3-828b-0e306d167c42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.152619] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.152750] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 6e9b44fb-153c-4aa8-87ec-04d27ab764ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.152864] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 3d2dabd1-5c4f-4997-843c-e1e124b687ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.153124] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Migration 0af0bbaf-03e7-4421-a1d3-0e2f3ab8aae2 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1984.153124] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 0a174705-f4ec-407c-b7ea-0945d5db46cf actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1984.153281] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1984.153415] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1920MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1984.243082] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ee7edb-65a0-479c-b8e6-2a64cd6bc8de {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.251201] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03d317a1-0c0f-4bca-b9a8-75013133a905 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.257254] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1984.285225] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8652737-a469-4e1f-a6b3-b6606618e179 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.293248] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b7fc93-5eb0-409b-9fa0-ede6fe343c93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.306744] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1984.789641] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d4fd5e-9edc-4921-8928-56c4d0abd110 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.797442] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073d260a-32f9-4c9d-984a-656d42717c7c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.812580] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1985.318668] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1985.318960] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.198s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.883099] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.883473] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.883627] env[63371]: INFO nova.compute.manager [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Shelving [ 1985.900752] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b886e83-b9c4-4c33-81bb-e70eb89e0e44 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.920494] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c397be-c356-4ae6-884a-f735aa93464b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.927443] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance '0a174705-f4ec-407c-b7ea-0945d5db46cf' progress to 83 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1986.392420] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1986.392800] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d0ee4c6-83f2-47cb-9b38-2021e2825359 {{(pid=63371) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.399928] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 1986.399928] env[63371]: value = "task-1775136" [ 1986.399928] env[63371]: _type = "Task" [ 1986.399928] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.407675] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775136, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.434438] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1986.434732] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-989839ae-630f-4f16-8a2f-96d331870bb7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.441232] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1986.441232] env[63371]: value = "task-1775137" [ 1986.441232] env[63371]: _type = "Task" [ 1986.441232] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.449058] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775137, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.910065] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775136, 'name': PowerOffVM_Task, 'duration_secs': 0.21558} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.910300] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1986.911054] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3ee0fe1-1454-469f-ba78-fb4d7687a802 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.929054] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f293c9-3978-477f-97dd-bb5e9b9aa897 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.951354] env[63371]: DEBUG oslo_vmware.api [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775137, 'name': PowerOnVM_Task, 'duration_secs': 0.379944} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.951614] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1986.951795] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54503cc4-4599-4bb0-a7f5-720f0387df86 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance '0a174705-f4ec-407c-b7ea-0945d5db46cf' progress to 100 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1987.440064] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1987.440332] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-70350b19-eb5c-40e6-bde8-9973a8c6f086 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1987.448395] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 1987.448395] env[63371]: value = "task-1775138" [ 1987.448395] env[63371]: _type = "Task" [ 1987.448395] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1987.459548] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775138, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.960152] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775138, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.460201] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775138, 'name': CreateSnapshot_Task, 'duration_secs': 0.602938} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.460502] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1988.461231] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275bd5f8-69e7-40c3-ba0f-3aac3aab331e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.978500] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1988.978889] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3d098539-94d3-418c-8b63-32315233af47 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.988542] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 1988.988542] env[63371]: value = "task-1775139" [ 1988.988542] env[63371]: _type = "Task" [ 1988.988542] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.997380] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775139, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.498732] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775139, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.508432] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "0a174705-f4ec-407c-b7ea-0945d5db46cf" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1989.508671] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "0a174705-f4ec-407c-b7ea-0945d5db46cf" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1989.508855] env[63371]: DEBUG nova.compute.manager [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Going to confirm migration 9 {{(pid=63371) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4784}} [ 1989.999398] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775139, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.043639] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1990.043825] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquired lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1990.043996] env[63371]: DEBUG nova.network.neutron [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1990.044205] env[63371]: DEBUG nova.objects.instance [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lazy-loading 'info_cache' on Instance uuid 0a174705-f4ec-407c-b7ea-0945d5db46cf {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1990.501035] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775139, 'name': CloneVM_Task, 'duration_secs': 1.461286} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.501035] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Created linked-clone VM from snapshot [ 1990.501499] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3aa605d2-7aa7-49e4-bb8c-0481b33ea0aa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.508653] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Uploading image d0f4f856-b3e0-4946-9bc9-451dfdc1d434 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1990.532765] env[63371]: DEBUG oslo_vmware.rw_handles [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1990.532765] env[63371]: value = "vm-368521" [ 1990.532765] env[63371]: _type = "VirtualMachine" [ 1990.532765] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1990.533035] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5d4e68f0-131d-48ae-837f-18d742d7dd26 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.540468] env[63371]: DEBUG oslo_vmware.rw_handles [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lease: (returnval){ [ 1990.540468] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fe4bca-0db6-e369-8871-2452ee791985" [ 1990.540468] env[63371]: _type = "HttpNfcLease" [ 1990.540468] env[63371]: } obtained for exporting VM: (result){ [ 1990.540468] env[63371]: value = "vm-368521" [ 1990.540468] env[63371]: _type = "VirtualMachine" [ 1990.540468] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1990.540741] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the lease: (returnval){ [ 1990.540741] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fe4bca-0db6-e369-8871-2452ee791985" [ 1990.540741] env[63371]: _type = "HttpNfcLease" [ 1990.540741] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1990.548858] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1990.548858] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fe4bca-0db6-e369-8871-2452ee791985" [ 1990.548858] env[63371]: _type = "HttpNfcLease" [ 1990.548858] env[63371]: } is initializing. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1991.049062] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1991.049062] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fe4bca-0db6-e369-8871-2452ee791985" [ 1991.049062] env[63371]: _type = "HttpNfcLease" [ 1991.049062] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1991.049368] env[63371]: DEBUG oslo_vmware.rw_handles [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1991.049368] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52fe4bca-0db6-e369-8871-2452ee791985" [ 1991.049368] env[63371]: _type = "HttpNfcLease" [ 1991.049368] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1991.051927] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d34323-e9a1-4f1c-b3f6-3537f37dea5d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.059206] env[63371]: DEBUG oslo_vmware.rw_handles [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522d6e2e-9878-0fa8-8694-0ed776333d24/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1991.059385] env[63371]: DEBUG oslo_vmware.rw_handles [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522d6e2e-9878-0fa8-8694-0ed776333d24/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1991.147424] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d7acc6cb-fe16-4378-aca4-fb09b175a1b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.359635] env[63371]: DEBUG nova.network.neutron [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance_info_cache with network_info: [{"id": "429adb0e-c314-4234-9278-025fc3386ec7", "address": "fa:16:3e:7c:10:0e", "network": {"id": "9b9f285d-f977-4e1a-9842-cbf075dd908a", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-313256073-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2601d597b4d64481ace490d56d1056a6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap429adb0e-c3", "ovs_interfaceid": "429adb0e-c314-4234-9278-025fc3386ec7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1991.464893] env[63371]: DEBUG nova.compute.manager [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Stashing vm_state: active {{(pid=63371) _prep_resize 
/opt/stack/nova/nova/compute/manager.py:5625}} [ 1991.862197] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Releasing lock "refresh_cache-0a174705-f4ec-407c-b7ea-0945d5db46cf" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1991.862555] env[63371]: DEBUG nova.objects.instance [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lazy-loading 'migration_context' on Instance uuid 0a174705-f4ec-407c-b7ea-0945d5db46cf {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1991.987223] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.987602] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.365771] env[63371]: DEBUG nova.objects.base [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Object Instance<0a174705-f4ec-407c-b7ea-0945d5db46cf> lazy-loaded attributes: info_cache,migration_context {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1992.366848] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1943247f-60ba-4787-b6d2-33530f1ffd07 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.387538] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5fb59eb5-33b4-48e5-8fe0-58033ce7b1c8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.394108] env[63371]: DEBUG oslo_vmware.api [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 1992.394108] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525037fe-2d3b-4432-66aa-b6fb700ecfc0" [ 1992.394108] env[63371]: _type = "Task" [ 1992.394108] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.405676] env[63371]: DEBUG oslo_vmware.api [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525037fe-2d3b-4432-66aa-b6fb700ecfc0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.492726] env[63371]: INFO nova.compute.claims [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1992.905286] env[63371]: DEBUG oslo_vmware.api [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]525037fe-2d3b-4432-66aa-b6fb700ecfc0, 'name': SearchDatastore_Task, 'duration_secs': 0.030845} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.905797] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1992.999711] env[63371]: INFO nova.compute.resource_tracker [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating resource usage from migration c2930f9f-101b-425b-850a-cbb15e0f7c65 [ 1993.122129] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d64c7f-bf45-4b6d-8310-4fff6956b063 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.129938] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f48d76-4451-440b-9bd5-d1becaac2cce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.161924] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c4e09c-a11f-412a-b9fc-34cd807774f3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.169788] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76613b09-8dce-41bd-81eb-7834fbfa6bb9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.184647] env[63371]: DEBUG nova.compute.provider_tree [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1993.688460] env[63371]: DEBUG nova.scheduler.client.report [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1994.193919] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.206s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1994.194381] env[63371]: INFO nova.compute.manager [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Migrating [ 1994.201365] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.296s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1994.715051] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.715051] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.715051] env[63371]: DEBUG nova.network.neutron [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1994.841842] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814884de-e12a-49e6-9965-edbbb95fc8ab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.850329] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85497871-e419-4dbd-9733-91b725052b1e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.881994] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3dccd6-8922-4061-b90d-35558eaac588 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.890638] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed405057-4ea4-486c-81da-293374a0426e 
{{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.904052] env[63371]: DEBUG nova.compute.provider_tree [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1995.407566] env[63371]: DEBUG nova.scheduler.client.report [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1995.447241] env[63371]: DEBUG nova.network.neutron [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance_info_cache with network_info: [{"id": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "address": "fa:16:3e:54:78:e9", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc15567-65", "ovs_interfaceid": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1995.949584] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.420140] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.219s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1996.983988] env[63371]: INFO nova.scheduler.client.report [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleted allocation for migration 0af0bbaf-03e7-4421-a1d3-0e2f3ab8aae2 [ 1997.133054] env[63371]: INFO nova.compute.manager [None req-b3147f4f-4e19-4628-9940-ad2f7cd09385 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Get console output [ 1997.133401] env[63371]: WARNING nova.virt.vmwareapi.driver [None req-b3147f4f-4e19-4628-9940-ad2f7cd09385 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] The console log is missing. Check your VSPC configuration [ 1997.464020] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66575c76-80d2-4539-8f1b-da04dbb0e84f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.483799] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance '6e9b44fb-153c-4aa8-87ec-04d27ab764ff' progress to 0 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1997.489192] env[63371]: DEBUG oslo_concurrency.lockutils [None req-85cb6a5e-5d1c-4246-beb7-d82103a6866b tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "0a174705-f4ec-407c-b7ea-0945d5db46cf" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.980s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.606429] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.606645] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.606829] env[63371]: INFO nova.compute.manager [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Shelving [ 1997.991094] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 
tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1997.991659] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9845708e-ee24-40ad-8e7a-02c5431ed0ef {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.000146] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1998.000146] env[63371]: value = "task-1775141" [ 1998.000146] env[63371]: _type = "Task" [ 1998.000146] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.008512] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775141, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.114080] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1998.114447] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a02b18b-f103-4e41-adaa-ffbf07405f84 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.121573] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 1998.121573] env[63371]: value = "task-1775142" [ 1998.121573] env[63371]: _type = "Task" [ 1998.121573] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.129944] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775142, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.511109] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775141, 'name': PowerOffVM_Task, 'duration_secs': 0.238296} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.511109] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1998.511595] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance '6e9b44fb-153c-4aa8-87ec-04d27ab764ff' progress to 17 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1998.631560] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775142, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.634561] env[63371]: DEBUG oslo_vmware.rw_handles [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522d6e2e-9878-0fa8-8694-0ed776333d24/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1998.635820] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753e9a0d-f72c-499c-8c03-7cede2b3116d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.642041] env[63371]: DEBUG oslo_vmware.rw_handles [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522d6e2e-9878-0fa8-8694-0ed776333d24/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1998.642206] env[63371]: ERROR oslo_vmware.rw_handles [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522d6e2e-9878-0fa8-8694-0ed776333d24/disk-0.vmdk due to incomplete transfer. [ 1998.642414] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d1a1edf8-f5da-4bbc-9e3b-6534b5b10957 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.648512] env[63371]: DEBUG oslo_vmware.rw_handles [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/522d6e2e-9878-0fa8-8694-0ed776333d24/disk-0.vmdk. 
{{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1998.648704] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Uploaded image d0f4f856-b3e0-4946-9bc9-451dfdc1d434 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1998.650988] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1998.651219] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-47dc93c1-e530-465a-be7b-c215385b8a56 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.656613] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 1998.656613] env[63371]: value = "task-1775143" [ 1998.656613] env[63371]: _type = "Task" [ 1998.656613] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.664528] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775143, 'name': Destroy_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.018509] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1999.018786] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1999.018949] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1999.019146] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1999.019295] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1999.019443] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1999.019648] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1999.019832] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1999.020064] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Got 1 possible topologies 
{{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1999.020253] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1999.020430] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1999.025640] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ebd1106b-ed2d-4b4b-8c9e-04e1b26b54ee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.040928] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 1999.040928] env[63371]: value = "task-1775144" [ 1999.040928] env[63371]: _type = "Task" [ 1999.040928] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.049123] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775144, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.131882] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775142, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.169142] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775143, 'name': Destroy_Task, 'duration_secs': 0.324129} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.169550] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Destroyed the VM [ 1999.169947] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1999.170316] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-39f4b7d2-350f-48e3-bc4f-47b9efcfb54a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.178129] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 1999.178129] env[63371]: value = "task-1775145" [ 1999.178129] env[63371]: _type = "Task" [ 1999.178129] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.185773] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775145, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.550987] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775144, 'name': ReconfigVM_Task, 'duration_secs': 0.369919} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.551323] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance '6e9b44fb-153c-4aa8-87ec-04d27ab764ff' progress to 33 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1999.634057] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775142, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.686882] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775145, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.057889] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2000.058150] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2000.058307] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2000.058486] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2000.058629] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2000.058774] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2000.058974] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2000.059144] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2000.059309] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Got 1 possible topologies 
{{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2000.059509] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2000.059684] env[63371]: DEBUG nova.virt.hardware [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2000.065067] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Reconfiguring VM instance instance-00000078 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2000.065359] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c983c51-d7df-4fa9-b1fb-6e5d81d304d8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.084637] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2000.084637] env[63371]: value = "task-1775146" [ 2000.084637] env[63371]: _type = "Task" [ 2000.084637] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.092337] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775146, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.132072] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775142, 'name': PowerOffVM_Task, 'duration_secs': 1.765759} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.132349] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2000.133107] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1696a5-c429-4daa-a186-504febd53df1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.150906] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623fb4d3-a19d-4774-9177-48ae5881e478 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.188216] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775145, 'name': RemoveSnapshot_Task, 'duration_secs': 0.684706} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.189156] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2000.189438] env[63371]: DEBUG nova.compute.manager [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2000.190224] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c0e0e60-1c74-4010-beea-c0a55e4584e6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.594185] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775146, 'name': ReconfigVM_Task, 'duration_secs': 0.188368} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.594551] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Reconfigured VM instance instance-00000078 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2000.595279] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc3b76a-4639-43ff-91bb-e5c059b00730 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.616677] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 6e9b44fb-153c-4aa8-87ec-04d27ab764ff/6e9b44fb-153c-4aa8-87ec-04d27ab764ff.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2000.616937] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2952a6c7-fbb5-4c09-b099-115144757b93 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.634477] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2000.634477] env[63371]: value = "task-1775147" [ 2000.634477] env[63371]: _type = "Task" [ 2000.634477] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.641824] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775147, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.661070] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2000.661363] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4a86fa90-d3eb-4c83-818f-7478d1799bb4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.667633] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2000.667633] env[63371]: value = "task-1775148" [ 2000.667633] env[63371]: _type = "Task" [ 2000.667633] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.676709] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775148, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.702770] env[63371]: INFO nova.compute.manager [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Shelve offloading [ 2000.704488] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2000.704714] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95fb37f9-75e3-485b-9883-43fc52a56087 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.710875] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2000.710875] env[63371]: value = "task-1775149" [ 2000.710875] env[63371]: _type = "Task" [ 2000.710875] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.718595] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775149, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.143877] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775147, 'name': ReconfigVM_Task, 'duration_secs': 0.262826} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.144168] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 6e9b44fb-153c-4aa8-87ec-04d27ab764ff/6e9b44fb-153c-4aa8-87ec-04d27ab764ff.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2001.144456] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance '6e9b44fb-153c-4aa8-87ec-04d27ab764ff' progress to 50 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2001.176530] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775148, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.220856] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2001.221061] env[63371]: DEBUG nova.compute.manager [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2001.221728] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8dd7ac-66a0-496b-a69b-f79496340a6d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.226962] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2001.227139] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2001.227310] env[63371]: DEBUG nova.network.neutron [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 2001.650728] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac88f87-5b12-4b51-956f-40c63449b997 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.672372] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fe8abcc-7025-459a-b9c8-d0a5ae62b8c8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.679372] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775148, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.692016] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance '6e9b44fb-153c-4aa8-87ec-04d27ab764ff' progress to 67 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2001.932484] env[63371]: DEBUG nova.network.neutron [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updating instance_info_cache with network_info: [{"id": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "address": "fa:16:3e:2e:85:be", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d87d33-0a", "ovs_interfaceid": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2002.180344] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775148, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.227968] env[63371]: DEBUG nova.network.neutron [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Port 6fc15567-65bf-42ad-9a0a-1b1cee20b40b binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2002.435659] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2002.641949] env[63371]: DEBUG nova.compute.manager [req-c0eca8b4-648e-4af5-86ad-362a466ea56c req-5bc79cb6-5b54-447f-981a-2443816ae451 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Received event network-vif-unplugged-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2002.642190] env[63371]: DEBUG oslo_concurrency.lockutils [req-c0eca8b4-648e-4af5-86ad-362a466ea56c req-5bc79cb6-5b54-447f-981a-2443816ae451 service nova] Acquiring lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.642426] env[63371]: DEBUG oslo_concurrency.lockutils [req-c0eca8b4-648e-4af5-86ad-362a466ea56c req-5bc79cb6-5b54-447f-981a-2443816ae451 service nova] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.642562] env[63371]: DEBUG oslo_concurrency.lockutils [req-c0eca8b4-648e-4af5-86ad-362a466ea56c req-5bc79cb6-5b54-447f-981a-2443816ae451 service nova] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.642727] env[63371]: DEBUG nova.compute.manager [req-c0eca8b4-648e-4af5-86ad-362a466ea56c req-5bc79cb6-5b54-447f-981a-2443816ae451 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] No waiting events found dispatching network-vif-unplugged-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2002.642892] env[63371]: WARNING nova.compute.manager [req-c0eca8b4-648e-4af5-86ad-362a466ea56c req-5bc79cb6-5b54-447f-981a-2443816ae451 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Received unexpected event network-vif-unplugged-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e for instance with vm_state shelved and task_state shelving_offloading. [ 2002.679701] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775148, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.741516] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2002.742435] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9790252-72b5-4cd7-9e92-4efa982ab17a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.750218] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2002.750462] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9e18e0d-7786-4219-bf89-76417606b60e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.180476] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775148, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.249878] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.250190] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2003.250285] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2003.515022] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2003.515235] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None 
req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2003.515426] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleting the datastore file [datastore1] 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2003.515690] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7009bd8b-7a0f-4791-851e-98c9278b3415 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.522022] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2003.522022] env[63371]: value = "task-1775151" [ 2003.522022] env[63371]: _type = "Task" [ 2003.522022] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.529555] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775151, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.681684] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775148, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.032248] env[63371]: DEBUG oslo_vmware.api [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775151, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18217} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.032526] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2004.032715] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2004.032889] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2004.053736] env[63371]: INFO nova.scheduler.client.report [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleted allocations for instance 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d [ 2004.181562] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775148, 'name': CreateSnapshot_Task, 'duration_secs': 3.108408} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2004.181776] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2004.182550] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b45757-8615-4c46-be71-938745f2500f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.282978] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2004.283192] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2004.283370] env[63371]: DEBUG nova.network.neutron [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Building network info cache for instance 
{{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2004.557906] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2004.558196] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2004.558445] env[63371]: DEBUG nova.objects.instance [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lazy-loading 'resources' on Instance uuid 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2004.667568] env[63371]: DEBUG nova.compute.manager [req-82e28243-3500-406c-9e58-f8274c7c2909 req-263e7e82-3f8c-4d09-8b5b-f79896e3cfac service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Received event network-changed-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2004.667731] env[63371]: DEBUG nova.compute.manager [req-82e28243-3500-406c-9e58-f8274c7c2909 req-263e7e82-3f8c-4d09-8b5b-f79896e3cfac service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Refreshing instance network info cache due to event network-changed-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2004.667899] env[63371]: DEBUG oslo_concurrency.lockutils [req-82e28243-3500-406c-9e58-f8274c7c2909 req-263e7e82-3f8c-4d09-8b5b-f79896e3cfac service nova] Acquiring lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2004.668054] env[63371]: DEBUG oslo_concurrency.lockutils [req-82e28243-3500-406c-9e58-f8274c7c2909 req-263e7e82-3f8c-4d09-8b5b-f79896e3cfac service nova] Acquired lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2004.668217] env[63371]: DEBUG nova.network.neutron [req-82e28243-3500-406c-9e58-f8274c7c2909 req-263e7e82-3f8c-4d09-8b5b-f79896e3cfac service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Refreshing network info cache for port 14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2004.699783] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2004.700130] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-55d1afd5-d7a9-41d1-ad5f-e28026b47ab4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.710793] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2004.710793] env[63371]: value = "task-1775152" [ 2004.710793] env[63371]: _type = "Task" [ 2004.710793] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2004.719436] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775152, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.055823] env[63371]: DEBUG nova.network.neutron [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance_info_cache with network_info: [{"id": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "address": "fa:16:3e:54:78:e9", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc15567-65", "ovs_interfaceid": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2005.060495] env[63371]: DEBUG nova.objects.instance [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lazy-loading 'numa_topology' on Instance uuid 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2005.221899] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775152, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.378896] env[63371]: DEBUG nova.network.neutron [req-82e28243-3500-406c-9e58-f8274c7c2909 req-263e7e82-3f8c-4d09-8b5b-f79896e3cfac service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updated VIF entry in instance network info cache for port 14d87d33-0ac4-480f-b86e-c9e13b3e3e4e. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2005.379297] env[63371]: DEBUG nova.network.neutron [req-82e28243-3500-406c-9e58-f8274c7c2909 req-263e7e82-3f8c-4d09-8b5b-f79896e3cfac service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updating instance_info_cache with network_info: [{"id": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "address": "fa:16:3e:2e:85:be", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap14d87d33-0a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2005.558749] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2005.563070] env[63371]: DEBUG nova.objects.base [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Object Instance<9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d> lazy-loaded attributes: resources,numa_topology {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2005.652744] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9204149f-a453-42b2-a59d-4220afef1b70 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.661345] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9b72f76-c4b2-448b-b253-76936edad183 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.360317] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.361032] env[63371]: DEBUG oslo_concurrency.lockutils [req-82e28243-3500-406c-9e58-f8274c7c2909 req-263e7e82-3f8c-4d09-8b5b-f79896e3cfac service nova] Releasing lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.367445] 
env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0e8ec16-bc95-4296-b5fa-d344f62e2196 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.379877] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d842b4ef-f789-43e1-bab6-075e928aedaf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.383512] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775152, 'name': CloneVM_Task, 'duration_secs': 0.989522} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.384514] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Created linked-clone VM from snapshot [ 2006.385195] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acbe0d5-8679-41af-84e9-24200c68caba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.388338] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1199090d-5e23-4f71-93b9-6e2cbd603f7b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.398422] env[63371]: DEBUG nova.compute.provider_tree [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2006.416246] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12c6912-4be2-4bf3-b49a-9b8b4bbdd15a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.419072] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Uploading image 6a997963-9627-47ab-bf68-1b38285cf7d9 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2006.424781] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance '6e9b44fb-153c-4aa8-87ec-04d27ab764ff' progress to 83 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2006.443929] env[63371]: DEBUG oslo_vmware.rw_handles [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2006.443929] env[63371]: value = "vm-368523" 
[ 2006.443929] env[63371]: _type = "VirtualMachine" [ 2006.443929] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2006.444193] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7ff63e4e-6f01-406e-beff-2350552065fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.450262] env[63371]: DEBUG oslo_vmware.rw_handles [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lease: (returnval){ [ 2006.450262] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52225313-25cd-3c30-97ef-5b234b9a8aae" [ 2006.450262] env[63371]: _type = "HttpNfcLease" [ 2006.450262] env[63371]: } obtained for exporting VM: (result){ [ 2006.450262] env[63371]: value = "vm-368523" [ 2006.450262] env[63371]: _type = "VirtualMachine" [ 2006.450262] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2006.450578] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the lease: (returnval){ [ 2006.450578] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52225313-25cd-3c30-97ef-5b234b9a8aae" [ 2006.450578] env[63371]: _type = "HttpNfcLease" [ 2006.450578] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2006.457602] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2006.457602] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52225313-25cd-3c30-97ef-5b234b9a8aae" [ 2006.457602] env[63371]: _type = "HttpNfcLease" [ 2006.457602] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2006.913455] env[63371]: DEBUG nova.scheduler.client.report [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2006.930565] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2006.931088] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-632fb30e-01f2-4a2a-81da-b5af314af0c4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.939403] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2006.939403] env[63371]: value = "task-1775154" [ 2006.939403] env[63371]: _type = "Task" [ 2006.939403] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.947980] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775154, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.957348] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2006.957348] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52225313-25cd-3c30-97ef-5b234b9a8aae" [ 2006.957348] env[63371]: _type = "HttpNfcLease" [ 2006.957348] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2006.957627] env[63371]: DEBUG oslo_vmware.rw_handles [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2006.957627] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52225313-25cd-3c30-97ef-5b234b9a8aae" [ 2006.957627] env[63371]: _type = "HttpNfcLease" [ 2006.957627] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2006.958341] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0a2974-4e53-458a-9c6b-69328e51ccea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.965936] env[63371]: DEBUG oslo_vmware.rw_handles [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5283aef4-a7a9-a7ea-af2b-950b3da1632d/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2006.966124] env[63371]: DEBUG oslo_vmware.rw_handles [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5283aef4-a7a9-a7ea-af2b-950b3da1632d/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2007.073641] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4fd37ad2-16e5-406a-aedd-e0c650fa7de9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.418502] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.860s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.449270] env[63371]: DEBUG oslo_vmware.api [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775154, 'name': PowerOnVM_Task, 'duration_secs': 0.414543} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.449543] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2007.449809] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4b9d84ec-4445-42b7-955e-2b6cd16e3833 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance '6e9b44fb-153c-4aa8-87ec-04d27ab764ff' progress to 100 {{(pid=63371) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2007.927940] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2bf415bc-44c4-4797-882a-aaf56d52f855 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.044s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2007.929014] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.569s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2007.929371] env[63371]: INFO nova.compute.manager [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Unshelving [ 2008.940118] env[63371]: DEBUG nova.compute.utils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2009.444215] env[63371]: INFO nova.virt.block_device [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Booting with volume 44fef38f-3bfe-4eb9-814e-26572a81abc3 at /dev/sdb [ 2009.483674] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-28eb4c7a-8708-43b8-a63b-fa5da8e2e812 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.493326] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7327f4ac-73f9-44a4-83a6-48970da3f142 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.522886] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-88f8869d-f083-4bb9-bd97-3ffea1d63547 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.531077] env[63371]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604b4fbd-d584-4e6d-af89-035c6f8ff28b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.558230] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81f865c-1695-4530-9aaf-ddef1fc8e4ad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.565650] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c193d68f-f370-477e-ba87-dabeade44289 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.579327] env[63371]: DEBUG nova.virt.block_device [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updating existing volume attachment record: 0d550b81-0f70-46d7-a50e-722a9385f6d6 {{(pid=63371) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2010.159380] env[63371]: DEBUG nova.network.neutron [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Port 6fc15567-65bf-42ad-9a0a-1b1cee20b40b binding to destination host cpu-1 is already ACTIVE {{(pid=63371) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2010.159706] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2010.159816] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2010.159982] env[63371]: DEBUG nova.network.neutron [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2010.888235] env[63371]: DEBUG nova.network.neutron [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance_info_cache with network_info: [{"id": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "address": "fa:16:3e:54:78:e9", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", 
"version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc15567-65", "ovs_interfaceid": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.391796] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2011.895850] env[63371]: DEBUG nova.compute.manager [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63371) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:898}} [ 2011.896127] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2011.896369] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.399175] env[63371]: DEBUG nova.objects.instance [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lazy-loading 'migration_context' on Instance uuid 6e9b44fb-153c-4aa8-87ec-04d27ab764ff {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2013.095270] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7bc131-0b94-4007-b389-6b5154b12931 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.103336] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88670fdb-a752-4dd8-a7ef-20c29276eeec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.133559] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c9e8bfe0-8996-459c-b93e-05032a72fdf1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.141592] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d6d7e7f-4476-4890-9bd8-cfcf31a5947a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.154605] env[63371]: DEBUG nova.compute.provider_tree [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2013.657679] env[63371]: DEBUG nova.scheduler.client.report [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2014.667245] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.771s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2015.178660] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2015.178951] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2015.179333] env[63371]: DEBUG nova.objects.instance [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lazy-loading 'pci_requests' on Instance uuid 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2015.685596] env[63371]: DEBUG nova.objects.instance [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lazy-loading 'numa_topology' on Instance uuid 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d {{(pid=63371) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 2015.696843] env[63371]: DEBUG oslo_vmware.rw_handles [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5283aef4-a7a9-a7ea-af2b-950b3da1632d/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2015.697959] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b897452b-b317-448f-acab-b4515c39eccd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.705793] env[63371]: DEBUG oslo_vmware.rw_handles [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5283aef4-a7a9-a7ea-af2b-950b3da1632d/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2015.705981] env[63371]: ERROR oslo_vmware.rw_handles [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5283aef4-a7a9-a7ea-af2b-950b3da1632d/disk-0.vmdk due to incomplete transfer. [ 2015.707294] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-24bed3f8-d84c-4f65-9b6c-4a466ada15a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.712987] env[63371]: DEBUG oslo_vmware.rw_handles [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5283aef4-a7a9-a7ea-af2b-950b3da1632d/disk-0.vmdk. 
{{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2015.713200] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Uploaded image 6a997963-9627-47ab-bf68-1b38285cf7d9 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2015.715377] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2015.715625] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-e29be52c-7034-4ba3-ac47-e202bebd6d8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.721446] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2015.721446] env[63371]: value = "task-1775159" [ 2015.721446] env[63371]: _type = "Task" [ 2015.721446] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.729304] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775159, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.191891] env[63371]: INFO nova.compute.claims [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2016.209082] env[63371]: INFO nova.compute.manager [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Swapping old allocation on dict_keys(['c079ebb1-2fa2-4df9-bdab-118e305653c1']) held by migration c2930f9f-101b-425b-850a-cbb15e0f7c65 for instance [ 2016.231766] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775159, 'name': Destroy_Task, 'duration_secs': 0.430262} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.232614] env[63371]: DEBUG nova.scheduler.client.report [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Overwriting current allocation {'allocations': {'c079ebb1-2fa2-4df9-bdab-118e305653c1': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 190}}, 'project_id': 'f98ab0107f5040139ef8be7c3ae22207', 'user_id': 'f85b2454eed34665b92a1ebc087353c1', 'consumer_generation': 1} on consumer 6e9b44fb-153c-4aa8-87ec-04d27ab764ff {{(pid=63371) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2032}} [ 2016.234311] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Destroyed the VM [ 2016.234580] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2016.234997] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-83f98df4-9c64-458b-8fae-9494fa0c67ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.240720] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2016.240720] env[63371]: value = "task-1775160" [ 2016.240720] env[63371]: _type = "Task" [ 2016.240720] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2016.248103] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775160, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2016.339071] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2016.339071] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2016.339243] env[63371]: DEBUG nova.network.neutron [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2016.750225] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775160, 'name': RemoveSnapshot_Task, 'duration_secs': 0.355941} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2016.751279] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2016.751279] env[63371]: DEBUG nova.compute.manager [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2016.751982] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87a51fd-b712-4c28-851c-20ca71cd0b98 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.059911] env[63371]: DEBUG nova.network.neutron [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance_info_cache with network_info: [{"id": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "address": "fa:16:3e:54:78:e9", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": 
{"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6fc15567-65", "ovs_interfaceid": "6fc15567-65bf-42ad-9a0a-1b1cee20b40b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2017.262714] env[63371]: INFO nova.compute.manager [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Shelve offloading [ 2017.264316] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2017.264577] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6dd16324-b494-468a-a555-f74768c48e46 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.271935] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2017.271935] env[63371]: value = "task-1775161" [ 2017.271935] env[63371]: _type = "Task" [ 2017.271935] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.279491] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775161, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.280784] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae34f008-ae31-4961-90f2-fba11558ca8a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.287060] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6212045d-f80c-48ec-bf97-7acc399c1f57 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.315560] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0973bd5d-c311-4966-8260-24fb6be3e582 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.322232] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4099c8-4a7c-464a-8b3a-53ba537a7926 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.334802] env[63371]: DEBUG nova.compute.provider_tree [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2017.563520] env[63371]: DEBUG oslo_concurrency.lockutils [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-6e9b44fb-153c-4aa8-87ec-04d27ab764ff" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2017.563922] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2017.564238] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f1e357a3-bdb0-426a-a521-caf28706085c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.571668] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2017.571668] env[63371]: value = "task-1775162" [ 2017.571668] env[63371]: _type = "Task" [ 2017.571668] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.579605] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775162, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.782042] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2017.782441] env[63371]: DEBUG nova.compute.manager [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2017.783022] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc49a17-6717-46b8-b8f8-28733002a88c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.788490] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2017.788653] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2017.788841] env[63371]: DEBUG nova.network.neutron [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2017.837872] env[63371]: DEBUG nova.scheduler.client.report [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2018.081407] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775162, 'name': PowerOffVM_Task, 'duration_secs': 0.192366} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.081689] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2018.082386] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2018.082605] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2018.082761] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2018.082934] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2018.083090] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2018.083236] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2018.083434] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2018.083588] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 
tempest-ServerActionsTestJSON-577892058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2018.083746] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2018.083902] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2018.084081] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2018.089037] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87b36baa-cc41-4f00-a6e1-ef136c8bcabf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.104329] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2018.104329] env[63371]: value = "task-1775163" [ 2018.104329] env[63371]: _type = "Task" [ 2018.104329] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.114550] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775163, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.344546] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.165s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.385748] env[63371]: INFO nova.network.neutron [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updating port 14d87d33-0ac4-480f-b86e-c9e13b3e3e4e with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2018.618648] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775163, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.645956] env[63371]: DEBUG nova.network.neutron [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updating instance_info_cache with network_info: [{"id": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "address": "fa:16:3e:00:9c:75", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa21caeee-a9", "ovs_interfaceid": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.115855] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775163, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.149594] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.363250] env[63371]: DEBUG nova.compute.manager [req-1cd38e88-9917-44b7-94e2-f1448a84b270 req-e71de91a-ee7e-4ece-bb55-df6e12c8124a service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Received event network-vif-unplugged-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2019.363473] env[63371]: DEBUG oslo_concurrency.lockutils [req-1cd38e88-9917-44b7-94e2-f1448a84b270 req-e71de91a-ee7e-4ece-bb55-df6e12c8124a service nova] Acquiring lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2019.363680] env[63371]: DEBUG oslo_concurrency.lockutils [req-1cd38e88-9917-44b7-94e2-f1448a84b270 req-e71de91a-ee7e-4ece-bb55-df6e12c8124a service nova] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.363845] env[63371]: DEBUG oslo_concurrency.lockutils [req-1cd38e88-9917-44b7-94e2-f1448a84b270 req-e71de91a-ee7e-4ece-bb55-df6e12c8124a service nova] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.364041] env[63371]: DEBUG nova.compute.manager [req-1cd38e88-9917-44b7-94e2-f1448a84b270 req-e71de91a-ee7e-4ece-bb55-df6e12c8124a service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] No waiting events found dispatching network-vif-unplugged-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2019.364179] env[63371]: WARNING nova.compute.manager [req-1cd38e88-9917-44b7-94e2-f1448a84b270 req-e71de91a-ee7e-4ece-bb55-df6e12c8124a service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Received unexpected event network-vif-unplugged-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 for instance with vm_state shelved and task_state shelving_offloading. 
[ 2019.450724] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2019.451650] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd9ac83-0f9b-4d05-96ad-bf94b4abc166 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.459208] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2019.459435] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4ee144d2-c461-4dcd-b255-20ca89aa46ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.565410] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2019.565693] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2019.565891] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleting the datastore file [datastore1] 3d2dabd1-5c4f-4997-843c-e1e124b687ba {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2019.566183] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65fc27c6-d85b-46d9-ac8e-a2253e653d06 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.573161] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2019.573161] env[63371]: value = "task-1775165" [ 2019.573161] env[63371]: _type = "Task" [ 2019.573161] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.580594] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775165, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.615351] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775163, 'name': ReconfigVM_Task, 'duration_secs': 1.151254} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.616163] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-288c84ca-0147-4827-8ac0-d831489abb78 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.634571] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2019.634848] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2019.634968] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2019.635167] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2019.635315] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2019.635499] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2019.635708] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2019.635867] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2019.636045] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2019.636213] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2019.636383] env[63371]: DEBUG nova.virt.hardware [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2019.637151] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10e620b6-8476-4be4-b3de-3ef55d9606d7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.642057] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2019.642057] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5220eee8-57ff-3f93-7f6f-f3c4bd97fd04" [ 2019.642057] env[63371]: _type = "Task" [ 2019.642057] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.649294] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5220eee8-57ff-3f93-7f6f-f3c4bd97fd04, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.985734] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2019.985891] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2019.986087] env[63371]: DEBUG nova.network.neutron [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2020.083111] env[63371]: DEBUG oslo_vmware.api [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775165, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129391} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.083359] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2020.083536] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2020.083713] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2020.105743] env[63371]: INFO nova.scheduler.client.report [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleted allocations for instance 3d2dabd1-5c4f-4997-843c-e1e124b687ba [ 2020.153808] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5220eee8-57ff-3f93-7f6f-f3c4bd97fd04, 'name': SearchDatastore_Task, 'duration_secs': 0.006391} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.159065] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Reconfiguring VM instance instance-00000078 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2020.159533] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2a337c63-994f-425b-83cd-22c70834be1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.176712] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2020.176712] env[63371]: value = "task-1775166" [ 2020.176712] env[63371]: _type = "Task" [ 2020.176712] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.183980] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775166, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.431243] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.431482] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.431651] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.431793] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2020.431939] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2020.609690] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2020.609954] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2020.610184] env[63371]: DEBUG nova.objects.instance [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'resources' on Instance uuid 3d2dabd1-5c4f-4997-843c-e1e124b687ba {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2020.677804] env[63371]: DEBUG nova.network.neutron [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updating instance_info_cache with network_info: [{"id": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "address": "fa:16:3e:2e:85:be", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d87d33-0a", "ovs_interfaceid": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2020.688128] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775166, 'name': ReconfigVM_Task, 'duration_secs': 0.155594} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.688875] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Reconfigured VM instance instance-00000078 to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2020.689644] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9991d23c-1b69-4cc2-a443-859b1de3ae1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.712024] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 6e9b44fb-153c-4aa8-87ec-04d27ab764ff/6e9b44fb-153c-4aa8-87ec-04d27ab764ff.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2020.712242] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97f0a224-128b-40fc-ab78-93fc4a67588d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.729312] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2020.729312] env[63371]: value = "task-1775167" [ 2020.729312] env[63371]: _type = "Task" [ 2020.729312] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.736848] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775167, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.935698] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2021.112626] env[63371]: DEBUG nova.objects.instance [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'numa_topology' on Instance uuid 3d2dabd1-5c4f-4997-843c-e1e124b687ba {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2021.183122] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2021.204401] env[63371]: DEBUG nova.virt.hardware [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='ae684c1b5f557e06a96779ba68d06902',container_format='bare',created_at=2024-12-11T21:42:22Z,direct_url=,disk_format='vmdk',id=d0f4f856-b3e0-4946-9bc9-451dfdc1d434,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-200781019-shelved',owner='da713632f95146f1986c0d8a9e529ca0',properties=ImageMetaProps,protected=,size=31670272,status='active',tags=,updated_at=2024-12-11T21:42:35Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2021.204732] env[63371]: DEBUG nova.virt.hardware [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2021.204891] env[63371]: DEBUG nova.virt.hardware [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2021.205084] env[63371]: DEBUG nova.virt.hardware [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2021.205230] env[63371]: DEBUG nova.virt.hardware [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
2021.205375] env[63371]: DEBUG nova.virt.hardware [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2021.205601] env[63371]: DEBUG nova.virt.hardware [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2021.205775] env[63371]: DEBUG nova.virt.hardware [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2021.205935] env[63371]: DEBUG nova.virt.hardware [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2021.206107] env[63371]: DEBUG nova.virt.hardware [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2021.206277] env[63371]: DEBUG nova.virt.hardware [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2021.207126] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3a21e4-e65f-42f7-84d5-4395328176ed {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.215028] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb16fb5-77c6-415f-9c63-4215181c6032 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.227765] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:85:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14d87d33-0ac4-480f-b86e-c9e13b3e3e4e', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2021.234960] env[63371]: DEBUG oslo.service.loopingcall [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 
tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2021.235248] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2021.237901] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c276784-a09b-4077-9580-9640e9ba0cd3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.257754] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775167, 'name': ReconfigVM_Task, 'duration_secs': 0.238969} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.258818] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 6e9b44fb-153c-4aa8-87ec-04d27ab764ff/6e9b44fb-153c-4aa8-87ec-04d27ab764ff.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2021.259096] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2021.259096] env[63371]: value = "task-1775168" [ 2021.259096] env[63371]: _type = "Task" [ 2021.259096] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.259726] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc6c92d5-fc89-48f2-9e6f-79d3f486363b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.269651] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775168, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.282600] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9eaa7a4-0877-4fd4-86c0-06a2da8a332f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.300493] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56efe41b-9b1b-44b3-8906-58912c2e104d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.318161] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26519b0b-e952-409b-8ace-6579744bae59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.324785] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2021.325021] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07c82997-4bc8-4dd3-bc90-79c54b63e6f7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.330867] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2021.330867] env[63371]: value = "task-1775169" [ 2021.330867] env[63371]: _type = "Task" [ 2021.330867] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.338050] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775169, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.403377] env[63371]: DEBUG nova.compute.manager [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Received event network-changed-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2021.403569] env[63371]: DEBUG nova.compute.manager [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Refreshing instance network info cache due to event network-changed-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2021.403824] env[63371]: DEBUG oslo_concurrency.lockutils [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] Acquiring lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.403977] env[63371]: DEBUG oslo_concurrency.lockutils [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] Acquired lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2021.404180] env[63371]: DEBUG nova.network.neutron [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Refreshing network info cache for port a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2021.614890] env[63371]: DEBUG nova.objects.base [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Object Instance<3d2dabd1-5c4f-4997-843c-e1e124b687ba> lazy-loaded attributes: resources,numa_topology {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2021.696412] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1196b1c2-31af-4383-a166-7c0593fce294 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.704549] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e79793-02a9-461d-9fb9-58e609339790 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.734804] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b1805f6-c5b1-4861-91a0-41372860eb3e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.742041] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8b5ac3-8a3a-4978-97d7-af562ba42ae6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.754658] env[63371]: DEBUG nova.compute.provider_tree [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2021.770901] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775168, 'name': CreateVM_Task, 'duration_secs': 0.327269} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.771061] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2021.771673] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d0f4f856-b3e0-4946-9bc9-451dfdc1d434" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2021.771837] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d0f4f856-b3e0-4946-9bc9-451dfdc1d434" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2021.772215] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/d0f4f856-b3e0-4946-9bc9-451dfdc1d434" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2021.772442] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43d4bfe0-112a-41d7-ada9-7a64d2fbc5a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.776999] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2021.776999] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ba4fd3-cfe4-1680-eb4a-dcc6f20a2986" [ 2021.776999] env[63371]: _type = "Task" [ 2021.776999] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.784627] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ba4fd3-cfe4-1680-eb4a-dcc6f20a2986, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.840881] env[63371]: DEBUG oslo_vmware.api [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775169, 'name': PowerOnVM_Task, 'duration_secs': 0.376973} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.841129] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2022.092533] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.118570] env[63371]: DEBUG nova.network.neutron [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updated VIF entry in instance network info cache for port a21caeee-a9c4-4ead-8c4e-4dc84446b5b4. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2022.118927] env[63371]: DEBUG nova.network.neutron [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updating instance_info_cache with network_info: [{"id": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "address": "fa:16:3e:00:9c:75", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": null, "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapa21caeee-a9", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2022.257745] env[63371]: DEBUG nova.scheduler.client.report [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2022.287360] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 
tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d0f4f856-b3e0-4946-9bc9-451dfdc1d434" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2022.287598] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Processing image d0f4f856-b3e0-4946-9bc9-451dfdc1d434 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2022.287824] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/d0f4f856-b3e0-4946-9bc9-451dfdc1d434/d0f4f856-b3e0-4946-9bc9-451dfdc1d434.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2022.287964] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "[datastore1] devstack-image-cache_base/d0f4f856-b3e0-4946-9bc9-451dfdc1d434/d0f4f856-b3e0-4946-9bc9-451dfdc1d434.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2022.288156] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2022.288394] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20ce9333-f6e1-4ec1-a6c7-3dd7133ee0d2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.305853] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2022.306073] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2022.306794] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb21bdb5-672c-46c8-9c1a-dc1d305a7161 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.311677] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2022.311677] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524338d8-94f9-aedc-4807-f17741a02a39" [ 2022.311677] env[63371]: _type = "Task" [ 2022.311677] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.318890] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524338d8-94f9-aedc-4807-f17741a02a39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.621619] env[63371]: DEBUG oslo_concurrency.lockutils [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] Releasing lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2022.621907] env[63371]: DEBUG nova.compute.manager [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Received event network-vif-plugged-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2022.622125] env[63371]: DEBUG oslo_concurrency.lockutils [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] Acquiring lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.622340] env[63371]: DEBUG oslo_concurrency.lockutils [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2022.622500] env[63371]: DEBUG oslo_concurrency.lockutils [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2022.622667] env[63371]: DEBUG nova.compute.manager [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] No waiting events found dispatching 
network-vif-plugged-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2022.622836] env[63371]: WARNING nova.compute.manager [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Received unexpected event network-vif-plugged-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e for instance with vm_state shelved_offloaded and task_state spawning. [ 2022.622994] env[63371]: DEBUG nova.compute.manager [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Received event network-changed-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2022.623166] env[63371]: DEBUG nova.compute.manager [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Refreshing instance network info cache due to event network-changed-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2022.623348] env[63371]: DEBUG oslo_concurrency.lockutils [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] Acquiring lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2022.623473] env[63371]: DEBUG oslo_concurrency.lockutils [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] Acquired lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2022.623628] env[63371]: DEBUG nova.network.neutron [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Refreshing network info cache for port 14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2022.764860] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.155s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2022.767227] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.832s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2022.767401] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2022.767552] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for 
cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2022.768587] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b52bcaa-f9fa-405d-9317-b5a28a88f2ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.776785] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e7d871-c0bd-431f-bad0-d000a02019ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.790419] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49be10d-9fb9-41b0-bdcd-48fd9ac4a75d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.796816] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f39ede-4f3d-45d5-b5f0-d11fb214fe06 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.826965] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180745MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2022.827119] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2022.827308] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2022.836762] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Preparing fetch location {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2022.837064] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Fetch image to [datastore1] OSTACK_IMG_3e41527e-3a96-441b-bd95-8fab4139512f/OSTACK_IMG_3e41527e-3a96-441b-bd95-8fab4139512f.vmdk {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2022.837253] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Downloading stream optimized image d0f4f856-b3e0-4946-9bc9-451dfdc1d434 to [datastore1] 
OSTACK_IMG_3e41527e-3a96-441b-bd95-8fab4139512f/OSTACK_IMG_3e41527e-3a96-441b-bd95-8fab4139512f.vmdk on the data store datastore1 as vApp {{(pid=63371) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2022.837419] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Downloading image file data d0f4f856-b3e0-4946-9bc9-451dfdc1d434 to the ESX as VM named 'OSTACK_IMG_3e41527e-3a96-441b-bd95-8fab4139512f' {{(pid=63371) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2022.851158] env[63371]: INFO nova.compute.manager [None req-bc88217c-49f3-463e-bd5f-6335584e465f tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance to original state: 'active' [ 2022.907854] env[63371]: DEBUG oslo_vmware.rw_handles [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2022.907854] env[63371]: value = "resgroup-9" [ 2022.907854] env[63371]: _type = "ResourcePool" [ 2022.907854] env[63371]: }. {{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2022.908126] env[63371]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f062f3c6-bccc-47bd-bc41-1da712e15c4b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.927891] env[63371]: DEBUG oslo_vmware.rw_handles [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lease: (returnval){ [ 2022.927891] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52954f42-19d4-8eea-1523-21945f54b89d" [ 2022.927891] env[63371]: _type = "HttpNfcLease" [ 2022.927891] env[63371]: } obtained for vApp import into resource pool (val){ [ 2022.927891] env[63371]: value = "resgroup-9" [ 2022.927891] env[63371]: _type = "ResourcePool" [ 2022.927891] env[63371]: }. {{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2022.928294] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the lease: (returnval){ [ 2022.928294] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52954f42-19d4-8eea-1523-21945f54b89d" [ 2022.928294] env[63371]: _type = "HttpNfcLease" [ 2022.928294] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2022.934240] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2022.934240] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52954f42-19d4-8eea-1523-21945f54b89d" [ 2022.934240] env[63371]: _type = "HttpNfcLease" [ 2022.934240] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2023.275120] env[63371]: DEBUG oslo_concurrency.lockutils [None req-5ef0bab4-81f1-4cd9-9d4b-3f1fc4144cc0 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.668s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.276098] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 1.184s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.276289] env[63371]: INFO nova.compute.manager [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Unshelving [ 2023.408054] env[63371]: DEBUG nova.network.neutron [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updated VIF entry in instance network info cache for port 14d87d33-0ac4-480f-b86e-c9e13b3e3e4e. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2023.408442] env[63371]: DEBUG nova.network.neutron [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updating instance_info_cache with network_info: [{"id": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "address": "fa:16:3e:2e:85:be", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14d87d33-0a", "ovs_interfaceid": "14d87d33-0ac4-480f-b86e-c9e13b3e3e4e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2023.437873] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2023.437873] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52954f42-19d4-8eea-1523-21945f54b89d" [ 2023.437873] env[63371]: _type = "HttpNfcLease" [ 2023.437873] env[63371]: } is 
initializing. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2023.859058] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 6df9af10-0053-4696-920a-10ab2af67ef5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2023.859058] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 29791f6c-edec-44b3-828b-0e306d167c42 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2023.859058] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 0a174705-f4ec-407c-b7ea-0945d5db46cf actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2023.859058] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 6e9b44fb-153c-4aa8-87ec-04d27ab764ff actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2023.859058] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2023.870895] env[63371]: DEBUG oslo_concurrency.lockutils [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.871324] env[63371]: DEBUG oslo_concurrency.lockutils [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.871517] env[63371]: DEBUG oslo_concurrency.lockutils [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2023.871743] env[63371]: DEBUG oslo_concurrency.lockutils [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.871932] env[63371]: DEBUG oslo_concurrency.lockutils [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.874041] env[63371]: INFO nova.compute.manager [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Terminating instance [ 2023.876115] env[63371]: DEBUG nova.compute.manager [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2023.876311] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2023.877291] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd9497c-00fb-40a6-a7a9-ebe20ea213ee {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.885629] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2023.885876] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-37676bf7-41f4-411b-9f48-e43d77ee7588 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.891740] env[63371]: DEBUG oslo_vmware.api [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2023.891740] env[63371]: value = "task-1775171" [ 2023.891740] env[63371]: _type = "Task" [ 2023.891740] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.901487] env[63371]: DEBUG oslo_vmware.api [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775171, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.911099] env[63371]: DEBUG oslo_concurrency.lockutils [req-c91bf87a-aeba-4560-b4e0-ae99f48c0994 req-549e74ae-9fea-44c2-9ad2-01a143f968c8 service nova] Releasing lock "refresh_cache-9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2023.937159] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2023.937159] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52954f42-19d4-8eea-1523-21945f54b89d" [ 2023.937159] env[63371]: _type = "HttpNfcLease" [ 2023.937159] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2024.038950] env[63371]: DEBUG oslo_concurrency.lockutils [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "0a174705-f4ec-407c-b7ea-0945d5db46cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.039285] env[63371]: DEBUG oslo_concurrency.lockutils [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "0a174705-f4ec-407c-b7ea-0945d5db46cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.039513] env[63371]: DEBUG oslo_concurrency.lockutils [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "0a174705-f4ec-407c-b7ea-0945d5db46cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.039705] env[63371]: DEBUG oslo_concurrency.lockutils [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "0a174705-f4ec-407c-b7ea-0945d5db46cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.039870] env[63371]: DEBUG oslo_concurrency.lockutils [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "0a174705-f4ec-407c-b7ea-0945d5db46cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.042019] env[63371]: INFO nova.compute.manager [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Terminating instance [ 2024.044018] env[63371]: DEBUG nova.compute.manager [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2024.044306] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2024.044605] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52b27536-12ec-4280-af63-f81c051ed37d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.051879] env[63371]: DEBUG oslo_vmware.api [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 2024.051879] env[63371]: value = "task-1775172" [ 2024.051879] env[63371]: _type = "Task" [ 2024.051879] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.061635] env[63371]: DEBUG oslo_vmware.api [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775172, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.300991] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.362566] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 3d2dabd1-5c4f-4997-843c-e1e124b687ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2024.362795] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2024.362955] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2024.403345] env[63371]: DEBUG oslo_vmware.api [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775171, 'name': PowerOffVM_Task, 'duration_secs': 0.188155} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.403633] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2024.403792] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2024.404052] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75e8a4aa-9e81-4bc1-b140-c50aa158daba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.438902] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2024.438902] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52954f42-19d4-8eea-1523-21945f54b89d" [ 2024.438902] env[63371]: _type = "HttpNfcLease" [ 2024.438902] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2024.439213] env[63371]: DEBUG oslo_vmware.rw_handles [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2024.439213] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52954f42-19d4-8eea-1523-21945f54b89d" [ 2024.439213] env[63371]: _type = "HttpNfcLease" [ 2024.439213] env[63371]: }. {{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2024.439930] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c975d8-7a85-4281-8510-9eb6d6cc72d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.446931] env[63371]: DEBUG oslo_vmware.rw_handles [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5215966d-fcaa-538f-f4a2-d1fb5e5deb11/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2024.447142] env[63371]: DEBUG oslo_vmware.rw_handles [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating HTTP connection to write to file with size = 31670272 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5215966d-fcaa-538f-f4a2-d1fb5e5deb11/disk-0.vmdk. 
{{(pid=63371) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2024.449617] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a9eb9e-6add-4220-b3e9-2d665374e960 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.511018] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfecbc3-3ad3-4093-83f6-ca8a96d2fe5c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.539547] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-de647a46-a300-4587-89f2-a393a54dd813 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.543723] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c8e488-7cd7-4dc8-80b3-9c9c77cfb546 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.551846] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c68260-4243-438f-9e86-63cf55c147fd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.564022] env[63371]: DEBUG oslo_vmware.api [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775172, 'name': PowerOffVM_Task, 'duration_secs': 0.171288} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2024.570138] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2024.570353] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Volume detach. 
Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2024.570543] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368516', 'volume_id': '326a81a2-ec8c-4921-b1d8-903c122d6006', 'name': 'volume-326a81a2-ec8c-4921-b1d8-903c122d6006', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '0a174705-f4ec-407c-b7ea-0945d5db46cf', 'attached_at': '2024-12-11T21:42:21.000000', 'detached_at': '', 'volume_id': '326a81a2-ec8c-4921-b1d8-903c122d6006', 'serial': '326a81a2-ec8c-4921-b1d8-903c122d6006'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2024.571339] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2024.573109] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5576e1-db87-4900-94e9-8c2a3b906839 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.594753] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51571590-2edd-441c-b1c0-8133d910e7b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.597376] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2024.597574] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2024.597748] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleting the datastore file [datastore1] 6e9b44fb-153c-4aa8-87ec-04d27ab764ff {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2024.598243] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ecf69362-a128-4a9c-8b5c-4d7069762a25 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.603987] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae0e828-e0d7-4141-a203-8a842c64baec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.607278] env[63371]: DEBUG oslo_vmware.api [None 
req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2024.607278] env[63371]: value = "task-1775174" [ 2024.607278] env[63371]: _type = "Task" [ 2024.607278] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.623959] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-900eafd9-72f3-4987-8dc4-7bd055dca6e4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.629076] env[63371]: DEBUG oslo_vmware.api [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775174, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2024.641127] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] The volume has not been displaced from its original location: [datastore1] volume-326a81a2-ec8c-4921-b1d8-903c122d6006/volume-326a81a2-ec8c-4921-b1d8-903c122d6006.vmdk. No consolidation needed. {{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2024.646354] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2024.646598] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54f343e0-c41f-4a39-a3cc-5747ca9315cc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.664006] env[63371]: DEBUG oslo_vmware.api [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 2024.664006] env[63371]: value = "task-1775175" [ 2024.664006] env[63371]: _type = "Task" [ 2024.664006] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2024.672122] env[63371]: DEBUG oslo_vmware.api [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775175, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.076564] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2025.117727] env[63371]: DEBUG oslo_vmware.api [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775174, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195273} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.118029] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2025.118187] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2025.118370] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2025.118542] env[63371]: INFO nova.compute.manager [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Took 1.24 seconds to destroy the instance on the hypervisor. [ 2025.118853] env[63371]: DEBUG oslo.service.loopingcall [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2025.118964] env[63371]: DEBUG nova.compute.manager [-] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2025.119076] env[63371]: DEBUG nova.network.neutron [-] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2025.174726] env[63371]: DEBUG oslo_vmware.api [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775175, 'name': ReconfigVM_Task, 'duration_secs': 0.175943} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.175968] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2025.180795] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec2dfa51-4bb3-4118-82bf-b53afba558ab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.200884] env[63371]: DEBUG oslo_vmware.api [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 2025.200884] env[63371]: value = "task-1775176" [ 2025.200884] env[63371]: _type = "Task" [ 2025.200884] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.210782] env[63371]: DEBUG oslo_vmware.api [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775176, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.581260] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2025.581522] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.754s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.581745] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.281s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.581903] env[63371]: DEBUG nova.objects.instance [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'pci_requests' on Instance uuid 3d2dabd1-5c4f-4997-843c-e1e124b687ba {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2025.583101] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2025.583250] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11229}} [ 2025.660243] env[63371]: DEBUG oslo_vmware.rw_handles [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Completed reading data from the image iterator. {{(pid=63371) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2025.660503] env[63371]: DEBUG oslo_vmware.rw_handles [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5215966d-fcaa-538f-f4a2-d1fb5e5deb11/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2025.661416] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4932c7a-a959-4e94-9bad-78c336057378 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.669386] env[63371]: DEBUG oslo_vmware.rw_handles [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5215966d-fcaa-538f-f4a2-d1fb5e5deb11/disk-0.vmdk is in state: ready. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2025.670125] env[63371]: DEBUG oslo_vmware.rw_handles [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5215966d-fcaa-538f-f4a2-d1fb5e5deb11/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2025.670125] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-3846c616-de3d-4af9-a974-5e5f54d3e321 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.682095] env[63371]: DEBUG nova.compute.manager [req-838e0d18-2949-4d41-a704-3aa146a80b91 req-5473117b-f2f6-4022-9d33-89f8d1f7e1df service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Received event network-vif-deleted-6fc15567-65bf-42ad-9a0a-1b1cee20b40b {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2025.683642] env[63371]: INFO nova.compute.manager [req-838e0d18-2949-4d41-a704-3aa146a80b91 req-5473117b-f2f6-4022-9d33-89f8d1f7e1df service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Neutron deleted interface 6fc15567-65bf-42ad-9a0a-1b1cee20b40b; detaching it from the instance and deleting it from the info cache [ 2025.683642] env[63371]: DEBUG nova.network.neutron [req-838e0d18-2949-4d41-a704-3aa146a80b91 req-5473117b-f2f6-4022-9d33-89f8d1f7e1df service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2025.710687] env[63371]: DEBUG oslo_vmware.api [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775176, 'name': ReconfigVM_Task, 'duration_secs': 0.166098} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2025.711045] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368516', 'volume_id': '326a81a2-ec8c-4921-b1d8-903c122d6006', 'name': 'volume-326a81a2-ec8c-4921-b1d8-903c122d6006', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attaching', 'instance': '0a174705-f4ec-407c-b7ea-0945d5db46cf', 'attached_at': '2024-12-11T21:42:21.000000', 'detached_at': '', 'volume_id': '326a81a2-ec8c-4921-b1d8-903c122d6006', 'serial': '326a81a2-ec8c-4921-b1d8-903c122d6006'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2025.711324] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2025.712045] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3da3668e-34f7-4e71-adf0-5ad4813e0ee8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.718860] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2025.719088] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca2a53cc-759f-4ebd-9e93-bb6f48f00e1c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.801291] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2025.801561] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2025.801807] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleting the datastore file [datastore1] 0a174705-f4ec-407c-b7ea-0945d5db46cf {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2025.802132] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0a593c6-610e-4779-a528-3d2c2879924b {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.808895] env[63371]: DEBUG oslo_vmware.api [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 2025.808895] env[63371]: value = "task-1775178" [ 2025.808895] env[63371]: _type = "Task" [ 2025.808895] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.817028] env[63371]: DEBUG oslo_vmware.api [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775178, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.871777] env[63371]: DEBUG oslo_vmware.rw_handles [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5215966d-fcaa-538f-f4a2-d1fb5e5deb11/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2025.872081] env[63371]: INFO nova.virt.vmwareapi.images [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Downloaded image file data d0f4f856-b3e0-4946-9bc9-451dfdc1d434 [ 2025.873087] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b8ef93-c9ec-4df7-a717-e5448ea726be {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.892818] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2aa57d4e-aa85-483f-8977-b7d09cf755a4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.916743] env[63371]: INFO nova.virt.vmwareapi.images [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] The imported VM was unregistered [ 2025.919141] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Caching image {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2025.919431] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating directory with path [datastore1] devstack-image-cache_base/d0f4f856-b3e0-4946-9bc9-451dfdc1d434 {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2025.919718] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-537ef4c6-bfeb-4703-a5ff-4ff0dfdd36a8 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2025.931259] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Created directory with path [datastore1] devstack-image-cache_base/d0f4f856-b3e0-4946-9bc9-451dfdc1d434 {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2025.931366] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_3e41527e-3a96-441b-bd95-8fab4139512f/OSTACK_IMG_3e41527e-3a96-441b-bd95-8fab4139512f.vmdk to [datastore1] devstack-image-cache_base/d0f4f856-b3e0-4946-9bc9-451dfdc1d434/d0f4f856-b3e0-4946-9bc9-451dfdc1d434.vmdk. {{(pid=63371) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2025.931646] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-8f1d6e8d-3c11-4048-8877-e2c89480e94a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.938412] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2025.938412] env[63371]: value = "task-1775180" [ 2025.938412] env[63371]: _type = "Task" [ 2025.938412] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.946451] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775180, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.098099] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] There are 41 instances to clean {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11238}} [ 2026.098299] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 37c33e03-30c7-4cf4-99a1-360d892dde2d] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2026.101912] env[63371]: DEBUG nova.objects.instance [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'numa_topology' on Instance uuid 3d2dabd1-5c4f-4997-843c-e1e124b687ba {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2026.165842] env[63371]: DEBUG nova.network.neutron [-] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2026.185662] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f93987e2-51b7-4e72-b910-09973068c74d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.198425] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e7c50c2-7cc9-4232-ab26-140c6235d437 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.226960] env[63371]: DEBUG nova.compute.manager [req-838e0d18-2949-4d41-a704-3aa146a80b91 req-5473117b-f2f6-4022-9d33-89f8d1f7e1df service nova] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Detach interface failed, port_id=6fc15567-65bf-42ad-9a0a-1b1cee20b40b, reason: Instance 6e9b44fb-153c-4aa8-87ec-04d27ab764ff could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2026.318524] env[63371]: DEBUG oslo_vmware.api [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775178, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084132} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.318801] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2026.318987] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2026.319175] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2026.319349] env[63371]: INFO nova.compute.manager [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Took 2.28 seconds to destroy the instance on the hypervisor. [ 2026.319674] env[63371]: DEBUG oslo.service.loopingcall [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2026.319900] env[63371]: DEBUG nova.compute.manager [-] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2026.319996] env[63371]: DEBUG nova.network.neutron [-] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2026.447611] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775180, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.605526] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: fac8df06-ab04-41ec-a32b-f46a08470a97] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2026.608186] env[63371]: INFO nova.compute.claims [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2026.668757] env[63371]: INFO nova.compute.manager [-] [instance: 6e9b44fb-153c-4aa8-87ec-04d27ab764ff] Took 1.55 seconds to deallocate network for instance. 
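Editor's note: the repeated "Waiting for the task: (returnval){ ... } to complete" and "Task: {'id': task-..., ...} progress is N%" entries throughout this log come from oslo.vmware's wait_for_task / _poll_task loop. The sketch below is a minimal, hypothetical illustration of that poll-until-complete pattern, not the oslo.vmware implementation; the get_task_info helper, its return shape, and the state names are assumptions made only for this example.

# Minimal sketch of the poll-until-complete pattern behind the
# wait_for_task / "_poll_task ... progress is N%" DEBUG lines above.
# NOT the oslo.vmware code; `get_task_info` is a hypothetical helper
# standing in for retrieval of a vSphere-style Task.info object.
import time

def wait_for_task(get_task_info, task_ref, interval=0.5, timeout=300):
    """Poll a task reference until it succeeds, errors, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)  # e.g. {'state': 'running', 'progress': 43}
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise RuntimeError(f"task {task_ref} failed: {info.get('error')}")
        # 'queued'/'running': report progress and keep polling, as the
        # "... progress is N%" log entries do.
        print(f"task {task_ref} progress is {info.get('progress', 0)}%")
        time.sleep(interval)
    raise TimeoutError(f"task {task_ref} did not complete within {timeout}s")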
[ 2026.949191] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775180, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.113889] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: cfa04c51-c077-4f16-ae57-e54d62aac044] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2027.177321] env[63371]: DEBUG oslo_concurrency.lockutils [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2027.280657] env[63371]: DEBUG nova.network.neutron [-] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2027.449895] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775180, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.620050] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: a083adca-0638-4a39-bd4c-30c64d1c9b0e] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2027.707822] env[63371]: DEBUG nova.compute.manager [req-46c7ef19-0e50-45bb-b041-53d8b97f477b req-64e9c19b-80b5-43f2-b9b8-b3c59b49aa47 service nova] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Received event network-vif-deleted-429adb0e-c314-4234-9278-025fc3386ec7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2027.783183] env[63371]: INFO nova.compute.manager [-] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Took 1.46 seconds to deallocate network for instance. 
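Editor's note: the "Acquiring lock ... by ...", "Lock ... acquired ... waited X.XXXs", and "... 'released' ... held Y.YYYs" entries are emitted by oslo.concurrency's lockutils wrappers around critical sections such as the "compute_resources" claim above. The following is a rough sketch of that usage, assuming the standard lockutils interface; the lock name reuse is taken from the log, but the decorated function and its arguments are invented for illustration and are not Nova's actual code.

# Rough sketch of lockutils usage producing the acquired/waited/held
# DEBUG lines seen in this log. Function body and arguments are
# illustrative only.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid, vcpus, memory_mb):
    # Runs with the "compute_resources" lock held, which is why the log
    # records waited/held durations around ResourceTracker.instance_claim.
    print(f"claiming {vcpus} VCPU / {memory_mb} MB for {instance_uuid}")

# The same lock can also be taken explicitly as a context manager:
with lockutils.lock('compute_resources'):
    pass  # critical section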
[ 2027.799211] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffe654e-e92f-4550-b843-f109c4b339b1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.807259] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89f42932-27d5-4f3f-bcff-36c9cfc5c849 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.839732] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90cb255b-e51b-4c7d-9e15-41b449b1868d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.848194] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23931877-8131-4f78-8174-f9dd8b09e9a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.861888] env[63371]: DEBUG nova.compute.provider_tree [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2027.950410] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775180, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.123416] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: e0fa0976-9a73-4b8b-b011-2e15199be5ff] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2028.336757] env[63371]: INFO nova.compute.manager [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Took 0.55 seconds to detach 1 volumes for instance. 
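Editor's note: the "Inventory has not changed for provider ..." entries in this log print the placement inventory records for the compute node (total, reserved, min_unit, max_unit, step_size, allocation_ratio per resource class). The schedulable capacity implied by such a record is, in effect, (total - reserved) * allocation_ratio. The short sketch below just performs that arithmetic on the figures reported for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 in this log; it is an illustration of the formula, not placement code.

# Effective capacity implied by the inventory records logged here:
# capacity = (total - reserved) * allocation_ratio.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(record):
    return (record['total'] - record['reserved']) * record['allocation_ratio']

for rc, record in inventory.items():
    print(rc, effective_capacity(record))
# -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0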
[ 2028.339136] env[63371]: DEBUG nova.compute.manager [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 0a174705-f4ec-407c-b7ea-0945d5db46cf] Deleting volume: 326a81a2-ec8c-4921-b1d8-903c122d6006 {{(pid=63371) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3248}} [ 2028.364930] env[63371]: DEBUG nova.scheduler.client.report [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2028.450412] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775180, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2028.627229] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 05f6f94a-c9c4-4737-8b07-77e9c2093497] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2028.870069] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.288s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.872769] env[63371]: DEBUG oslo_concurrency.lockutils [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.696s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2028.873082] env[63371]: DEBUG nova.objects.instance [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lazy-loading 'resources' on Instance uuid 6e9b44fb-153c-4aa8-87ec-04d27ab764ff {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2028.881895] env[63371]: DEBUG oslo_concurrency.lockutils [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.903557] env[63371]: INFO nova.network.neutron [None req-17ee2b28-f25c-4131-876a-f80bc73be16a 
tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updating port a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2028.950513] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775180, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.578216} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.950761] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_3e41527e-3a96-441b-bd95-8fab4139512f/OSTACK_IMG_3e41527e-3a96-441b-bd95-8fab4139512f.vmdk to [datastore1] devstack-image-cache_base/d0f4f856-b3e0-4946-9bc9-451dfdc1d434/d0f4f856-b3e0-4946-9bc9-451dfdc1d434.vmdk. [ 2028.950971] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Cleaning up location [datastore1] OSTACK_IMG_3e41527e-3a96-441b-bd95-8fab4139512f {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2028.951188] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_3e41527e-3a96-441b-bd95-8fab4139512f {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2028.951447] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c97c9f9-9d0b-400b-b343-a404532c1f2f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.957223] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2028.957223] env[63371]: value = "task-1775182" [ 2028.957223] env[63371]: _type = "Task" [ 2028.957223] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2028.965103] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775182, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.130560] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: ff724a9f-5e9a-4683-8eb3-058fb3639ea5] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2029.454386] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8678e280-b3d8-49e4-b5fa-02f29a93358e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.463871] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581375d3-d7fe-4d7f-90c1-bfd94886af02 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.469521] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067872} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2029.470064] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2029.470258] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "[datastore1] devstack-image-cache_base/d0f4f856-b3e0-4946-9bc9-451dfdc1d434/d0f4f856-b3e0-4946-9bc9-451dfdc1d434.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2029.470502] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/d0f4f856-b3e0-4946-9bc9-451dfdc1d434/d0f4f856-b3e0-4946-9bc9-451dfdc1d434.vmdk to [datastore1] 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d/9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2029.470755] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3bfe6755-c810-4432-8f45-fd593289a3ea {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.496662] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064d37f6-a2fa-4175-ba31-645c478902ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.501072] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2029.501072] env[63371]: value = "task-1775183" [ 2029.501072] env[63371]: _type = "Task" [ 2029.501072] 
env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2029.506448] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b28466c7-c035-489a-9ef2-43a99b147a52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2029.514604] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775183, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2029.522531] env[63371]: DEBUG nova.compute.provider_tree [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2029.634187] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: a27ab7ba-481c-4292-a885-5dc8d8653d0b] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2030.012489] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775183, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.025689] env[63371]: DEBUG nova.scheduler.client.report [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2030.138309] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 3a7463a2-09f4-4ad9-a21c-2b7cb5bb48ce] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2030.412228] env[63371]: DEBUG nova.compute.manager [req-1e74a142-e2d8-4eff-bae0-4d23d9af9898 req-c315b8a8-9f94-4756-837f-43044587df38 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Received event network-vif-plugged-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2030.412485] env[63371]: DEBUG oslo_concurrency.lockutils [req-1e74a142-e2d8-4eff-bae0-4d23d9af9898 req-c315b8a8-9f94-4756-837f-43044587df38 service nova] Acquiring lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2030.412734] env[63371]: DEBUG oslo_concurrency.lockutils [req-1e74a142-e2d8-4eff-bae0-4d23d9af9898 req-c315b8a8-9f94-4756-837f-43044587df38 service nova] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.412912] env[63371]: DEBUG oslo_concurrency.lockutils [req-1e74a142-e2d8-4eff-bae0-4d23d9af9898 req-c315b8a8-9f94-4756-837f-43044587df38 service nova] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.413175] env[63371]: DEBUG nova.compute.manager [req-1e74a142-e2d8-4eff-bae0-4d23d9af9898 req-c315b8a8-9f94-4756-837f-43044587df38 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] No waiting events found dispatching network-vif-plugged-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2030.413314] env[63371]: WARNING nova.compute.manager [req-1e74a142-e2d8-4eff-bae0-4d23d9af9898 req-c315b8a8-9f94-4756-837f-43044587df38 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Received unexpected event network-vif-plugged-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 for instance with vm_state shelved_offloaded and task_state spawning. [ 2030.513036] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775183, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2030.527198] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2030.527382] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2030.527560] env[63371]: DEBUG nova.network.neutron [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2030.532434] env[63371]: DEBUG oslo_concurrency.lockutils [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.659s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.534504] env[63371]: DEBUG oslo_concurrency.lockutils [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.653s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2030.534561] env[63371]: DEBUG nova.objects.instance [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lazy-loading 'resources' on Instance uuid 0a174705-f4ec-407c-b7ea-0945d5db46cf {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2030.552575] env[63371]: INFO nova.scheduler.client.report [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleted allocations for instance 6e9b44fb-153c-4aa8-87ec-04d27ab764ff [ 2030.642812] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 1689fc63-3c07-4517-bbef-0011d860e9fc] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2031.014857] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775183, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.060689] env[63371]: DEBUG oslo_concurrency.lockutils [None req-03f9034f-77c1-4c08-84ed-11d92dc82077 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "6e9b44fb-153c-4aa8-87ec-04d27ab764ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.189s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2031.119823] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ad3263-4922-4178-be97-539190ab59f6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.130650] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-789dc8d0-b408-4aba-9167-37c05573f530 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.162417] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: da2e3b05-9cb0-49bb-8945-924e48cf3431] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2031.169026] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a35693d-592d-4e73-8fee-cd7f4ddef192 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.177111] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa1dfea-4959-4335-b766-b6210fbbb3b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.193200] env[63371]: DEBUG nova.compute.provider_tree [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2031.357547] env[63371]: DEBUG nova.network.neutron [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updating instance_info_cache with network_info: [{"id": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "address": "fa:16:3e:00:9c:75", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": 
"nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa21caeee-a9", "ovs_interfaceid": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.515484] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775183, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.672134] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 407d1ef8-c5df-4277-b503-0d09cdaf8ef1] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2031.696670] env[63371]: DEBUG nova.scheduler.client.report [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2031.860362] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2031.883799] env[63371]: DEBUG nova.virt.hardware [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='4d97111fb4297a982b1805070268c171',container_format='bare',created_at=2024-12-11T21:42:33Z,direct_url=,disk_format='vmdk',id=6a997963-9627-47ab-bf68-1b38285cf7d9,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1843362805-shelved',owner='ceecd2a995cf4da0b4218e371065ca0b',properties=ImageMetaProps,protected=,size=31665152,status='active',tags=,updated_at=2024-12-11T21:42:52Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2031.884098] env[63371]: DEBUG nova.virt.hardware [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor limits 0:0:0 
{{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2031.884294] env[63371]: DEBUG nova.virt.hardware [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2031.884491] env[63371]: DEBUG nova.virt.hardware [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2031.884743] env[63371]: DEBUG nova.virt.hardware [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2031.884906] env[63371]: DEBUG nova.virt.hardware [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2031.885194] env[63371]: DEBUG nova.virt.hardware [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2031.885356] env[63371]: DEBUG nova.virt.hardware [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2031.885587] env[63371]: DEBUG nova.virt.hardware [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2031.885811] env[63371]: DEBUG nova.virt.hardware [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2031.886052] env[63371]: DEBUG nova.virt.hardware [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2031.887088] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02dc73ed-8891-40ae-b7bf-5d88fdc501ce {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.895805] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8cebb36d-8fcd-4dfc-a4f5-be860dd83991 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.911127] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:9c:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3a0ddd7d-c321-4187-bdd8-b19044ea2c4a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a21caeee-a9c4-4ead-8c4e-4dc84446b5b4', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2031.918905] env[63371]: DEBUG oslo.service.loopingcall [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2031.919256] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2031.919481] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28b09ba5-cb7e-4460-938e-265b55630eeb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.939198] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2031.939198] env[63371]: value = "task-1775184" [ 2031.939198] env[63371]: _type = "Task" [ 2031.939198] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.947627] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775184, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.972457] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "8591c7e6-37a5-421f-8627-28a3b022537a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2031.972720] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "8591c7e6-37a5-421f-8627-28a3b022537a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2032.015578] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775183, 'name': CopyVirtualDisk_Task} progress is 57%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.175277] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: d042bb16-c84d-42bb-af3f-38c08995fd91] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2032.201839] env[63371]: DEBUG oslo_concurrency.lockutils [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.667s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2032.225651] env[63371]: INFO nova.scheduler.client.report [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleted allocations for instance 0a174705-f4ec-407c-b7ea-0945d5db46cf [ 2032.440496] env[63371]: DEBUG nova.compute.manager [req-d23a1616-9b3c-4a86-ad14-510a72a4a8de req-1e8d890b-7c6f-4732-a4fb-9e3d9205ad60 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Received event network-changed-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2032.440696] env[63371]: DEBUG nova.compute.manager [req-d23a1616-9b3c-4a86-ad14-510a72a4a8de req-1e8d890b-7c6f-4732-a4fb-9e3d9205ad60 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Refreshing instance network info cache due to event network-changed-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2032.440915] env[63371]: DEBUG oslo_concurrency.lockutils [req-d23a1616-9b3c-4a86-ad14-510a72a4a8de req-1e8d890b-7c6f-4732-a4fb-9e3d9205ad60 service nova] Acquiring lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2032.441075] env[63371]: DEBUG oslo_concurrency.lockutils [req-d23a1616-9b3c-4a86-ad14-510a72a4a8de req-1e8d890b-7c6f-4732-a4fb-9e3d9205ad60 service nova] Acquired lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2032.441273] env[63371]: DEBUG nova.network.neutron [req-d23a1616-9b3c-4a86-ad14-510a72a4a8de req-1e8d890b-7c6f-4732-a4fb-9e3d9205ad60 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Refreshing network info cache for port a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2032.451972] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775184, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.475235] env[63371]: DEBUG nova.compute.manager [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Starting instance... 
{{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2032.516847] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775183, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.678708] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 3e2f17e7-8c9c-47c0-afb1-55e56eab74fd] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2032.733448] env[63371]: DEBUG oslo_concurrency.lockutils [None req-981cab92-097f-49ec-a45a-8ee81d143122 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "0a174705-f4ec-407c-b7ea-0945d5db46cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.694s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2032.954720] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775184, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.995648] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2032.995648] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2032.997483] env[63371]: INFO nova.compute.claims [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2033.000694] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "29791f6c-edec-44b3-828b-0e306d167c42" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.001315] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "29791f6c-edec-44b3-828b-0e306d167c42" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.001553] env[63371]: DEBUG 
oslo_concurrency.lockutils [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "29791f6c-edec-44b3-828b-0e306d167c42-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2033.001748] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "29791f6c-edec-44b3-828b-0e306d167c42-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2033.001949] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "29791f6c-edec-44b3-828b-0e306d167c42-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2033.006034] env[63371]: INFO nova.compute.manager [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Terminating instance [ 2033.008026] env[63371]: DEBUG nova.compute.manager [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2033.008026] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2033.008565] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274fb29e-dde7-49be-93c6-c5ab22fa0111 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.020630] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775183, 'name': CopyVirtualDisk_Task} progress is 97%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.025032] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2033.025337] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c411c44-e2f4-46f0-8cdf-5187d33fc7b0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.032073] env[63371]: DEBUG oslo_vmware.api [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 2033.032073] env[63371]: value = "task-1775185" [ 2033.032073] env[63371]: _type = "Task" [ 2033.032073] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.043302] env[63371]: DEBUG oslo_vmware.api [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775185, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.167620] env[63371]: DEBUG nova.network.neutron [req-d23a1616-9b3c-4a86-ad14-510a72a4a8de req-1e8d890b-7c6f-4732-a4fb-9e3d9205ad60 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updated VIF entry in instance network info cache for port a21caeee-a9c4-4ead-8c4e-4dc84446b5b4. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2033.167992] env[63371]: DEBUG nova.network.neutron [req-d23a1616-9b3c-4a86-ad14-510a72a4a8de req-1e8d890b-7c6f-4732-a4fb-9e3d9205ad60 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updating instance_info_cache with network_info: [{"id": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "address": "fa:16:3e:00:9c:75", "network": {"id": "d0bd8fa4-3e04-4c38-9bd8-4efaa7b7eb44", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-45917408-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.222", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ceecd2a995cf4da0b4218e371065ca0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3a0ddd7d-c321-4187-bdd8-b19044ea2c4a", "external-id": "nsx-vlan-transportzone-747", "segmentation_id": 747, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa21caeee-a9", "ovs_interfaceid": "a21caeee-a9c4-4ead-8c4e-4dc84446b5b4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2033.182183] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 9885de9e-c640-4d82-a47a-980988d89deb] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2033.454431] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775184, 'name': CreateVM_Task} progress is 99%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.517429] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775183, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.555544} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.517654] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/d0f4f856-b3e0-4946-9bc9-451dfdc1d434/d0f4f856-b3e0-4946-9bc9-451dfdc1d434.vmdk to [datastore1] 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d/9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2033.518441] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d16c8f-7d5c-445a-adf9-f80e3b8289ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.541563] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d/9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d.vmdk or device None with type streamOptimized {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2033.542204] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46b6052e-0c81-4ebd-a830-7617444cef89 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.566349] env[63371]: DEBUG oslo_vmware.api [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775185, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.567670] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2033.567670] env[63371]: value = "task-1775186" [ 2033.567670] env[63371]: _type = "Task" [ 2033.567670] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.575455] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775186, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.670616] env[63371]: DEBUG oslo_concurrency.lockutils [req-d23a1616-9b3c-4a86-ad14-510a72a4a8de req-1e8d890b-7c6f-4732-a4fb-9e3d9205ad60 service nova] Releasing lock "refresh_cache-3d2dabd1-5c4f-4997-843c-e1e124b687ba" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2033.685468] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 1ec21edd-7b7c-4a2b-983f-8aa6c022e033] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2033.955210] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775184, 'name': CreateVM_Task, 'duration_secs': 1.533658} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.955388] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2033.956179] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a997963-9627-47ab-bf68-1b38285cf7d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2033.956355] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a997963-9627-47ab-bf68-1b38285cf7d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2033.956741] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6a997963-9627-47ab-bf68-1b38285cf7d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2033.957013] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74bd7ffe-4c9c-4470-89b4-5b0779e49db5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.961839] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2033.961839] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f767f3-12aa-0d97-7cf5-bf3dd6a84362" [ 2033.961839] env[63371]: _type = "Task" [ 2033.961839] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.970611] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f767f3-12aa-0d97-7cf5-bf3dd6a84362, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.052403] env[63371]: DEBUG oslo_vmware.api [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775185, 'name': PowerOffVM_Task, 'duration_secs': 0.560812} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.054548] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2034.054780] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2034.055231] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-704cbe41-91d5-4102-ae24-a49a11a99cb3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.072581] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e1f15a-87f2-4714-a669-05269660295a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.077669] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775186, 'name': ReconfigVM_Task, 'duration_secs': 0.282901} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.078203] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d/9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d.vmdk or device None with type streamOptimized {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2034.079383] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'guest_format': None, 'encryption_format': None, 'device_name': '/dev/sda', 'disk_bus': None, 'encryption_options': None, 'encrypted': False, 'boot_index': 0, 'encryption_secret_uuid': None, 'device_type': 'disk', 'size': 0, 'image_id': '1aeb47a7-4e18-481d-b3c0-d33e8c7839d9'}], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'disk_bus': None, 'delete_on_termination': False, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368525', 'volume_id': '44fef38f-3bfe-4eb9-814e-26572a81abc3', 'name': 'volume-44fef38f-3bfe-4eb9-814e-26572a81abc3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d', 'attached_at': '', 'detached_at': '', 'volume_id': '44fef38f-3bfe-4eb9-814e-26572a81abc3', 'serial': '44fef38f-3bfe-4eb9-814e-26572a81abc3'}, 'boot_index': None, 'device_type': None, 'attachment_id': '0d550b81-0f70-46d7-a50e-722a9385f6d6', 'mount_device': '/dev/sdb', 'volume_type': None}], 'swap': None} {{(pid=63371) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2034.079591] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Volume attach. 
Driver type: vmdk {{(pid=63371) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2034.079778] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368525', 'volume_id': '44fef38f-3bfe-4eb9-814e-26572a81abc3', 'name': 'volume-44fef38f-3bfe-4eb9-814e-26572a81abc3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d', 'attached_at': '', 'detached_at': '', 'volume_id': '44fef38f-3bfe-4eb9-814e-26572a81abc3', 'serial': '44fef38f-3bfe-4eb9-814e-26572a81abc3'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2034.080538] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39e8e33-12ea-4797-bcf0-24cc79c20738 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.085424] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b2fe0f-5358-43f1-a2f4-20d230f9d022 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.099295] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48656043-fdf7-41f5-8852-4f28f26210a6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.126723] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4033c5bd-0185-4091-9925-fd49f4978977 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.148104] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] volume-44fef38f-3bfe-4eb9-814e-26572a81abc3/volume-44fef38f-3bfe-4eb9-814e-26572a81abc3.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2034.148634] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba876a7c-dbbc-4d03-83fe-f48a6efabedb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.164384] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e900517-d9fa-450a-9387-0c095bc34924 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.169057] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2034.169057] env[63371]: value = "task-1775188" [ 2034.169057] env[63371]: _type = "Task" [ 2034.169057] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.179249] env[63371]: DEBUG nova.compute.provider_tree [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2034.186106] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775188, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.188431] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 0518c5a8-8cc1-4829-a0cf-5f5904f6df86] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2034.209228] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2034.209421] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2034.209624] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleting the datastore file [datastore1] 29791f6c-edec-44b3-828b-0e306d167c42 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2034.209902] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f9670d4-246d-4e13-8b40-d1573ff75f59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.216586] env[63371]: DEBUG oslo_vmware.api [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 2034.216586] env[63371]: value = "task-1775189" [ 2034.216586] env[63371]: _type = "Task" [ 2034.216586] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.224499] env[63371]: DEBUG oslo_vmware.api [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775189, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.473405] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a997963-9627-47ab-bf68-1b38285cf7d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2034.473651] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Processing image 6a997963-9627-47ab-bf68-1b38285cf7d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2034.473902] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6a997963-9627-47ab-bf68-1b38285cf7d9/6a997963-9627-47ab-bf68-1b38285cf7d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2034.474073] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6a997963-9627-47ab-bf68-1b38285cf7d9/6a997963-9627-47ab-bf68-1b38285cf7d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2034.474265] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2034.474511] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b18b624d-4e24-4ffa-af88-2e155b0c375a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.482191] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2034.482359] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2034.483059] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60e8f102-6e3a-482c-aa0b-2d6808f56bf9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.487775] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2034.487775] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca27f5-fd52-9de8-50cb-36da0d132173" [ 2034.487775] env[63371]: _type = "Task" [ 2034.487775] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.502636] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52ca27f5-fd52-9de8-50cb-36da0d132173, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.678524] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775188, 'name': ReconfigVM_Task, 'duration_secs': 0.302867} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.678882] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Reconfigured VM instance instance-00000077 to attach disk [datastore1] volume-44fef38f-3bfe-4eb9-814e-26572a81abc3/volume-44fef38f-3bfe-4eb9-814e-26572a81abc3.vmdk or device None with type thin {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2034.684172] env[63371]: DEBUG nova.scheduler.client.report [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2034.687252] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5484b34-91a0-4270-9445-da170a6e25f1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.697459] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 6c2edb87-7a36-4814-ac4a-199cdca1ef68] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11242}} [ 2034.704433] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2034.704433] env[63371]: value = "task-1775190" [ 2034.704433] env[63371]: _type = "Task" [ 2034.704433] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.712412] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775190, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.727333] env[63371]: DEBUG oslo_vmware.api [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775189, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131886} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.727694] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2034.728009] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2034.728277] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2034.728461] env[63371]: INFO nova.compute.manager [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Took 1.72 seconds to destroy the instance on the hypervisor. [ 2034.728720] env[63371]: DEBUG oslo.service.loopingcall [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2034.728923] env[63371]: DEBUG nova.compute.manager [-] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2034.729041] env[63371]: DEBUG nova.network.neutron [-] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2034.997979] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Preparing fetch location {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2034.998275] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Fetch image to [datastore1] OSTACK_IMG_66381870-b13e-4e1a-8894-e9baba8f9b92/OSTACK_IMG_66381870-b13e-4e1a-8894-e9baba8f9b92.vmdk {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2034.998471] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Downloading stream optimized image 6a997963-9627-47ab-bf68-1b38285cf7d9 to [datastore1] OSTACK_IMG_66381870-b13e-4e1a-8894-e9baba8f9b92/OSTACK_IMG_66381870-b13e-4e1a-8894-e9baba8f9b92.vmdk on the data store datastore1 as vApp {{(pid=63371) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2034.998649] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Downloading image file data 6a997963-9627-47ab-bf68-1b38285cf7d9 to the ESX as VM named 'OSTACK_IMG_66381870-b13e-4e1a-8894-e9baba8f9b92' {{(pid=63371) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2035.072030] env[63371]: DEBUG oslo_vmware.rw_handles [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2035.072030] env[63371]: value = "resgroup-9" [ 2035.072030] env[63371]: _type = "ResourcePool" [ 2035.072030] env[63371]: }. 
{{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2035.072772] env[63371]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-416188e7-e4cf-4b95-a342-ecc193ee149e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.100981] env[63371]: DEBUG oslo_vmware.rw_handles [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lease: (returnval){ [ 2035.100981] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525eea2b-f73a-cad6-6903-6b0b1c255b6e" [ 2035.100981] env[63371]: _type = "HttpNfcLease" [ 2035.100981] env[63371]: } obtained for vApp import into resource pool (val){ [ 2035.100981] env[63371]: value = "resgroup-9" [ 2035.100981] env[63371]: _type = "ResourcePool" [ 2035.100981] env[63371]: }. {{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2035.101248] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the lease: (returnval){ [ 2035.101248] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525eea2b-f73a-cad6-6903-6b0b1c255b6e" [ 2035.101248] env[63371]: _type = "HttpNfcLease" [ 2035.101248] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2035.107288] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2035.107288] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525eea2b-f73a-cad6-6903-6b0b1c255b6e" [ 2035.107288] env[63371]: _type = "HttpNfcLease" [ 2035.107288] env[63371]: } is initializing. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2035.178558] env[63371]: DEBUG nova.compute.manager [req-7da0ac31-364e-4cea-8ac6-8cb59d6aef9a req-ddfaf556-decb-41e1-805f-64dcf278ed2d service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Received event network-vif-deleted-34275fec-e3cb-4276-9619-f3498ff59a3a {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2035.179244] env[63371]: INFO nova.compute.manager [req-7da0ac31-364e-4cea-8ac6-8cb59d6aef9a req-ddfaf556-decb-41e1-805f-64dcf278ed2d service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Neutron deleted interface 34275fec-e3cb-4276-9619-f3498ff59a3a; detaching it from the instance and deleting it from the info cache [ 2035.179244] env[63371]: DEBUG nova.network.neutron [req-7da0ac31-364e-4cea-8ac6-8cb59d6aef9a req-ddfaf556-decb-41e1-805f-64dcf278ed2d service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2035.198903] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.203s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2035.199475] env[63371]: DEBUG nova.compute.manager [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Start building networks asynchronously for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2035.202609] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 3b2ed231-9f9c-4d28-9c81-034c2d17c9a7] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2035.214463] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775190, 'name': ReconfigVM_Task, 'duration_secs': 0.152659} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.214778] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368525', 'volume_id': '44fef38f-3bfe-4eb9-814e-26572a81abc3', 'name': 'volume-44fef38f-3bfe-4eb9-814e-26572a81abc3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d', 'attached_at': '', 'detached_at': '', 'volume_id': '44fef38f-3bfe-4eb9-814e-26572a81abc3', 'serial': '44fef38f-3bfe-4eb9-814e-26572a81abc3'} {{(pid=63371) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2035.215380] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ae4d9711-472f-4a4a-af8e-2c120ebb555f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.222781] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2035.222781] env[63371]: value = "task-1775192" [ 2035.222781] env[63371]: _type = "Task" [ 2035.222781] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.231479] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775192, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.610075] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2035.610075] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525eea2b-f73a-cad6-6903-6b0b1c255b6e" [ 2035.610075] env[63371]: _type = "HttpNfcLease" [ 2035.610075] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2035.610385] env[63371]: DEBUG oslo_vmware.rw_handles [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2035.610385] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]525eea2b-f73a-cad6-6903-6b0b1c255b6e" [ 2035.610385] env[63371]: _type = "HttpNfcLease" [ 2035.610385] env[63371]: }. 
{{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2035.611110] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6508b01a-8e6c-42ce-98e0-49515627f28b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.618288] env[63371]: DEBUG oslo_vmware.rw_handles [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5244bb30-d184-868f-de5f-c088be0b57d0/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2035.618468] env[63371]: DEBUG oslo_vmware.rw_handles [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating HTTP connection to write to file with size = 31665152 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5244bb30-d184-868f-de5f-c088be0b57d0/disk-0.vmdk. {{(pid=63371) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2035.674252] env[63371]: DEBUG nova.network.neutron [-] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2035.681776] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1ee26295-2d12-442b-ad0e-af3a2886a76b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.683653] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fddedd71-4831-4ded-829f-c4c80d24b7e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.695258] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f783f74-b09e-4f49-93d7-65a533dbd5a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.708225] env[63371]: DEBUG nova.compute.utils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2035.710500] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: da4839fa-8597-411c-b30c-0ac9226fec1f] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2035.712148] env[63371]: DEBUG nova.compute.manager [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2035.712422] env[63371]: DEBUG nova.network.neutron [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2035.732572] env[63371]: DEBUG nova.compute.manager [req-7da0ac31-364e-4cea-8ac6-8cb59d6aef9a req-ddfaf556-decb-41e1-805f-64dcf278ed2d service nova] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Detach interface failed, port_id=34275fec-e3cb-4276-9619-f3498ff59a3a, reason: Instance 29791f6c-edec-44b3-828b-0e306d167c42 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2035.738358] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775192, 'name': Rename_Task, 'duration_secs': 0.139755} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.738657] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2035.738868] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3d7468a-c87f-4162-a8cd-d3b4b9ef884a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.745062] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2035.745062] env[63371]: value = "task-1775193" [ 2035.745062] env[63371]: _type = "Task" [ 2035.745062] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.752447] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775193, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.753854] env[63371]: DEBUG nova.policy [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f85b2454eed34665b92a1ebc087353c1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f98ab0107f5040139ef8be7c3ae22207', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 2036.073287] env[63371]: DEBUG nova.network.neutron [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Successfully created port: fd85bc34-1186-464a-b7f2-5c62353373fb {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2036.178015] env[63371]: INFO nova.compute.manager [-] [instance: 29791f6c-edec-44b3-828b-0e306d167c42] Took 1.45 seconds to deallocate network for instance. [ 2036.210935] env[63371]: DEBUG nova.compute.manager [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2036.214704] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 943e2506-03a4-4633-b55b-381d9d8d9ef6] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2036.258710] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775193, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.686629] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2036.687335] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.687335] env[63371]: DEBUG nova.objects.instance [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lazy-loading 'resources' on Instance uuid 29791f6c-edec-44b3-828b-0e306d167c42 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2036.723051] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 0c8c6997-bec8-4a3b-80cf-cbf35f3843f8] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2036.758088] env[63371]: DEBUG oslo_vmware.api [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775193, 'name': PowerOnVM_Task, 'duration_secs': 0.520504} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.759912] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2036.800028] env[63371]: DEBUG oslo_vmware.rw_handles [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Completed reading data from the image iterator. {{(pid=63371) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2036.800028] env[63371]: DEBUG oslo_vmware.rw_handles [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5244bb30-d184-868f-de5f-c088be0b57d0/disk-0.vmdk. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2036.800305] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3ad53e-f79d-4145-8ae4-d95979437f36 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.807452] env[63371]: DEBUG oslo_vmware.rw_handles [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5244bb30-d184-868f-de5f-c088be0b57d0/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2036.807659] env[63371]: DEBUG oslo_vmware.rw_handles [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5244bb30-d184-868f-de5f-c088be0b57d0/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2036.807807] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-bc614c57-108c-4843-ac55-e287ad678f62 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.870814] env[63371]: DEBUG nova.compute.manager [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2036.871885] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef894bfc-d983-4384-95fc-4124d10aec8d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.011980] env[63371]: DEBUG oslo_vmware.rw_handles [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5244bb30-d184-868f-de5f-c088be0b57d0/disk-0.vmdk. 
{{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2037.012222] env[63371]: INFO nova.virt.vmwareapi.images [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Downloaded image file data 6a997963-9627-47ab-bf68-1b38285cf7d9 [ 2037.013102] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ff463d1-093e-48a4-9f17-286eea6429bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.030206] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7aa9efaa-2e86-4788-89a8-f6f4435ab4ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.056611] env[63371]: INFO nova.virt.vmwareapi.images [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] The imported VM was unregistered [ 2037.058891] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Caching image {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2037.059109] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Creating directory with path [datastore1] devstack-image-cache_base/6a997963-9627-47ab-bf68-1b38285cf7d9 {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2037.059370] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8cbf762d-e967-465a-a2b1-d6d4dd16120d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.069162] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Created directory with path [datastore1] devstack-image-cache_base/6a997963-9627-47ab-bf68-1b38285cf7d9 {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2037.069345] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_66381870-b13e-4e1a-8894-e9baba8f9b92/OSTACK_IMG_66381870-b13e-4e1a-8894-e9baba8f9b92.vmdk to [datastore1] devstack-image-cache_base/6a997963-9627-47ab-bf68-1b38285cf7d9/6a997963-9627-47ab-bf68-1b38285cf7d9.vmdk. 
{{(pid=63371) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2037.069584] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-8e1f1a1b-176a-4eff-8499-6f0b27f91d50 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.076397] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2037.076397] env[63371]: value = "task-1775195" [ 2037.076397] env[63371]: _type = "Task" [ 2037.076397] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.083839] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775195, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.226045] env[63371]: DEBUG nova.compute.manager [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Start spawning the instance on the hypervisor. {{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2037.228095] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: e16e4a55-4198-4308-b12c-d9ac07daecad] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2037.249565] env[63371]: DEBUG nova.virt.hardware [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2037.249797] env[63371]: DEBUG nova.virt.hardware [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2037.249953] env[63371]: DEBUG nova.virt.hardware [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2037.250152] 
env[63371]: DEBUG nova.virt.hardware [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2037.250312] env[63371]: DEBUG nova.virt.hardware [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2037.250438] env[63371]: DEBUG nova.virt.hardware [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2037.250648] env[63371]: DEBUG nova.virt.hardware [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2037.250845] env[63371]: DEBUG nova.virt.hardware [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2037.251060] env[63371]: DEBUG nova.virt.hardware [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2037.251208] env[63371]: DEBUG nova.virt.hardware [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2037.251381] env[63371]: DEBUG nova.virt.hardware [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2037.252264] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03b701c-64ba-41b0-8c89-24ac5cbb9057 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.264701] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611ae894-e587-4a8c-a56d-fed95bbfcb84 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.284247] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-868a1bb8-defd-437f-8532-c1dcc6f050db {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.291801] env[63371]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07aaf775-8530-415d-a93f-cba91ea89289 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.322919] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d97461-a1a3-490b-b2b8-1607802c3e04 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.330789] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699e03d9-be7c-4a05-9741-5ca46461a12e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.346577] env[63371]: DEBUG nova.compute.provider_tree [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2037.390547] env[63371]: DEBUG oslo_concurrency.lockutils [None req-77dc329a-587e-4194-91e7-50baa4221877 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 29.460s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.553470] env[63371]: DEBUG nova.compute.manager [req-65852a70-5295-4056-ab79-f2979d3173b6 req-bd6020c7-dcc5-476d-9bf0-113dd1365ed5 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Received event network-vif-plugged-fd85bc34-1186-464a-b7f2-5c62353373fb {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2037.553470] env[63371]: DEBUG oslo_concurrency.lockutils [req-65852a70-5295-4056-ab79-f2979d3173b6 req-bd6020c7-dcc5-476d-9bf0-113dd1365ed5 service nova] Acquiring lock "8591c7e6-37a5-421f-8627-28a3b022537a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2037.553470] env[63371]: DEBUG oslo_concurrency.lockutils [req-65852a70-5295-4056-ab79-f2979d3173b6 req-bd6020c7-dcc5-476d-9bf0-113dd1365ed5 service nova] Lock "8591c7e6-37a5-421f-8627-28a3b022537a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2037.553886] env[63371]: DEBUG oslo_concurrency.lockutils [req-65852a70-5295-4056-ab79-f2979d3173b6 req-bd6020c7-dcc5-476d-9bf0-113dd1365ed5 service nova] Lock "8591c7e6-37a5-421f-8627-28a3b022537a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.554389] env[63371]: DEBUG nova.compute.manager [req-65852a70-5295-4056-ab79-f2979d3173b6 req-bd6020c7-dcc5-476d-9bf0-113dd1365ed5 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] No waiting events found dispatching network-vif-plugged-fd85bc34-1186-464a-b7f2-5c62353373fb {{(pid=63371) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 2037.555069] env[63371]: WARNING nova.compute.manager [req-65852a70-5295-4056-ab79-f2979d3173b6 req-bd6020c7-dcc5-476d-9bf0-113dd1365ed5 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Received unexpected event network-vif-plugged-fd85bc34-1186-464a-b7f2-5c62353373fb for instance with vm_state building and task_state spawning. [ 2037.586923] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775195, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.646380] env[63371]: DEBUG nova.network.neutron [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Successfully updated port: fd85bc34-1186-464a-b7f2-5c62353373fb {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2037.732565] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 485a2d6a-1b58-470d-9dc5-8cf31b6726ef] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2037.849600] env[63371]: DEBUG nova.scheduler.client.report [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2038.087576] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775195, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.149507] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2038.149507] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2038.149507] env[63371]: DEBUG nova.network.neutron [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2038.236348] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 7349ecf6-2de7-4540-b713-7e29cbd3ff0b] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2038.355752] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.669s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.382012] env[63371]: INFO nova.scheduler.client.report [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleted allocations for instance 29791f6c-edec-44b3-828b-0e306d167c42 [ 2038.588405] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775195, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.684906] env[63371]: DEBUG nova.network.neutron [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2038.740129] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: f391d4f3-6e9d-4ddc-918a-8dc8581dfc00] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2038.846788] env[63371]: DEBUG nova.network.neutron [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Updating instance_info_cache with network_info: [{"id": "fd85bc34-1186-464a-b7f2-5c62353373fb", "address": "fa:16:3e:12:88:f0", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd85bc34-11", "ovs_interfaceid": "fd85bc34-1186-464a-b7f2-5c62353373fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2038.890624] env[63371]: DEBUG oslo_concurrency.lockutils [None req-9886011e-966f-48b7-9556-23ac42dc77fc tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "29791f6c-edec-44b3-828b-0e306d167c42" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.889s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.089067] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775195, 'name': MoveVirtualDisk_Task} progress is 88%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.243611] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 11527051-7a4f-481a-b5ed-14550c550c4e] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2039.349498] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2039.349817] env[63371]: DEBUG nova.compute.manager [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Instance network_info: |[{"id": "fd85bc34-1186-464a-b7f2-5c62353373fb", "address": "fa:16:3e:12:88:f0", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd85bc34-11", "ovs_interfaceid": "fd85bc34-1186-464a-b7f2-5c62353373fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2039.350298] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:88:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dbd2870d-a51d-472a-8034-1b3e132b5cb6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd85bc34-1186-464a-b7f2-5c62353373fb', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2039.358361] env[63371]: DEBUG oslo.service.loopingcall [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2039.358591] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2039.358853] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a82f25d5-638d-4bf0-a62c-b3d76654df4a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.381702] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2039.381702] env[63371]: value = "task-1775197" [ 2039.381702] env[63371]: _type = "Task" [ 2039.381702] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.390569] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775197, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.581754] env[63371]: DEBUG nova.compute.manager [req-1d5e58e2-e5e0-4b7c-af8c-e8a66979aa12 req-14ae0153-ebd6-48df-83f1-299bd68b9de0 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Received event network-changed-fd85bc34-1186-464a-b7f2-5c62353373fb {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2039.581960] env[63371]: DEBUG nova.compute.manager [req-1d5e58e2-e5e0-4b7c-af8c-e8a66979aa12 req-14ae0153-ebd6-48df-83f1-299bd68b9de0 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Refreshing instance network info cache due to event network-changed-fd85bc34-1186-464a-b7f2-5c62353373fb. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2039.582239] env[63371]: DEBUG oslo_concurrency.lockutils [req-1d5e58e2-e5e0-4b7c-af8c-e8a66979aa12 req-14ae0153-ebd6-48df-83f1-299bd68b9de0 service nova] Acquiring lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.582388] env[63371]: DEBUG oslo_concurrency.lockutils [req-1d5e58e2-e5e0-4b7c-af8c-e8a66979aa12 req-14ae0153-ebd6-48df-83f1-299bd68b9de0 service nova] Acquired lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.582549] env[63371]: DEBUG nova.network.neutron [req-1d5e58e2-e5e0-4b7c-af8c-e8a66979aa12 req-14ae0153-ebd6-48df-83f1-299bd68b9de0 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Refreshing network info cache for port fd85bc34-1186-464a-b7f2-5c62353373fb {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2039.593064] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775195, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.407885} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.593788] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_66381870-b13e-4e1a-8894-e9baba8f9b92/OSTACK_IMG_66381870-b13e-4e1a-8894-e9baba8f9b92.vmdk to [datastore1] devstack-image-cache_base/6a997963-9627-47ab-bf68-1b38285cf7d9/6a997963-9627-47ab-bf68-1b38285cf7d9.vmdk. [ 2039.593981] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Cleaning up location [datastore1] OSTACK_IMG_66381870-b13e-4e1a-8894-e9baba8f9b92 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2039.594196] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_66381870-b13e-4e1a-8894-e9baba8f9b92 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2039.594502] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4f56dcd5-ad56-4a62-9b24-10d13f2617e5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.601838] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2039.601838] env[63371]: value = "task-1775198" [ 2039.601838] env[63371]: _type = "Task" [ 2039.601838] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.609456] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775198, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.747409] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 3a6c12a7-732f-4a73-a8c5-6810b554cc03] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2039.891291] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775197, 'name': CreateVM_Task, 'duration_secs': 0.392615} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.891414] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2039.892293] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.892440] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.892802] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2039.892961] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79eb4c96-2776-4879-bcfe-724a637f34b7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.897194] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2039.897194] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52193b56-c935-7aa7-e0d0-83bae693a088" [ 2039.897194] env[63371]: _type = "Task" [ 2039.897194] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.904479] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52193b56-c935-7aa7-e0d0-83bae693a088, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.112336] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775198, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082376} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.112689] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2040.112869] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6a997963-9627-47ab-bf68-1b38285cf7d9/6a997963-9627-47ab-bf68-1b38285cf7d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.113122] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6a997963-9627-47ab-bf68-1b38285cf7d9/6a997963-9627-47ab-bf68-1b38285cf7d9.vmdk to [datastore1] 3d2dabd1-5c4f-4997-843c-e1e124b687ba/3d2dabd1-5c4f-4997-843c-e1e124b687ba.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2040.113857] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-38b54c49-30ab-4414-bfc3-62e2c63e8f97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.119898] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2040.119898] env[63371]: value = "task-1775199" [ 2040.119898] env[63371]: _type = "Task" [ 2040.119898] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.127187] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775199, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.251508] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 158259a4-f54a-4192-b235-f03838193516] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2040.349207] env[63371]: DEBUG nova.network.neutron [req-1d5e58e2-e5e0-4b7c-af8c-e8a66979aa12 req-14ae0153-ebd6-48df-83f1-299bd68b9de0 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Updated VIF entry in instance network info cache for port fd85bc34-1186-464a-b7f2-5c62353373fb. 
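The CopyVirtualDisk_Task lines above follow the same shape as every disk, power and reconfigure operation in this log: nova asks vCenter to start a task through the oslo.vmware session, then blocks in wait_for_task while _poll_task reports progress. A minimal sketch of that pattern, assuming a connected oslo_vmware.api.VMwareAPISession named session and a datacenter reference dc_ref (both placeholders, not taken from this log):

    # Copy the cached image vmdk into the instance directory and wait for the task,
    # mirroring the CopyVirtualDisk_Task / progress-polling entries above.
    src = ('[datastore1] devstack-image-cache_base/'
           '6a997963-9627-47ab-bf68-1b38285cf7d9/6a997963-9627-47ab-bf68-1b38285cf7d9.vmdk')
    dst = '[datastore1] 3d2dabd1-5c4f-4997-843c-e1e124b687ba/3d2dabd1-5c4f-4997-843c-e1e124b687ba.vmdk'
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=src, sourceDatacenter=dc_ref,
                              destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(task)   # polls until the task succeeds, raises if vCenter reports an error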
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2040.349635] env[63371]: DEBUG nova.network.neutron [req-1d5e58e2-e5e0-4b7c-af8c-e8a66979aa12 req-14ae0153-ebd6-48df-83f1-299bd68b9de0 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Updating instance_info_cache with network_info: [{"id": "fd85bc34-1186-464a-b7f2-5c62353373fb", "address": "fa:16:3e:12:88:f0", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd85bc34-11", "ovs_interfaceid": "fd85bc34-1186-464a-b7f2-5c62353373fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.408659] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52193b56-c935-7aa7-e0d0-83bae693a088, 'name': SearchDatastore_Task, 'duration_secs': 0.010751} completed successfully. 
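The instance_info_cache update above persists Neutron's view of the port as one JSON list. A short sketch of reading that structure back, using a trimmed copy of the entry shown in the log (the literal below is illustrative, not the full cached blob):

    # Trimmed-down version of the cached VIF entry from the log line above.
    network_info = [{
        'id': 'fd85bc34-1186-464a-b7f2-5c62353373fb',
        'address': 'fa:16:3e:12:88:f0',
        'network': {'subnets': [{'cidr': '192.168.128.0/28',
                                 'ips': [{'address': '192.168.128.3', 'type': 'fixed'}]}]},
    }]

    for vif in network_info:
        for subnet in vif['network']['subnets']:
            fixed = [ip['address'] for ip in subnet['ips'] if ip['type'] == 'fixed']
            print(vif['id'], subnet['cidr'], fixed)   # fd85bc34-... 192.168.128.0/28 ['192.168.128.3']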
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.409128] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.409361] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2040.409611] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2040.409759] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2040.409938] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2040.410328] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f12c0b0e-271a-4c3f-92d5-5645b13b5175 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.423876] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2040.424175] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2040.425171] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8abf176a-80a9-4261-a727-c0e936575641 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.431529] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2040.431529] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52c85c86-b5a8-fe3c-892b-07983175522a" [ 2040.431529] env[63371]: _type = "Task" [ 2040.431529] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.440514] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c85c86-b5a8-fe3c-892b-07983175522a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.631839] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775199, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.754786] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 935cf583-ecde-4a10-a773-6ff765e5bb49] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2040.852923] env[63371]: DEBUG oslo_concurrency.lockutils [req-1d5e58e2-e5e0-4b7c-af8c-e8a66979aa12 req-14ae0153-ebd6-48df-83f1-299bd68b9de0 service nova] Releasing lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2040.942721] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52c85c86-b5a8-fe3c-892b-07983175522a, 'name': SearchDatastore_Task, 'duration_secs': 0.059888} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.943627] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69a5af13-fd15-4c2d-bad3-618e3e16a794 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.949166] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2040.949166] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]529322eb-d4ad-1b79-2bf3-503504872f11" [ 2040.949166] env[63371]: _type = "Task" [ 2040.949166] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.957233] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529322eb-d4ad-1b79-2bf3-503504872f11, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.130448] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775199, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.258564] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: cf63c2a2-ee72-464e-944d-5e53ca8635ac] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2041.460374] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]529322eb-d4ad-1b79-2bf3-503504872f11, 'name': SearchDatastore_Task, 'duration_secs': 0.086307} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.460682] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2041.460948] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 8591c7e6-37a5-421f-8627-28a3b022537a/8591c7e6-37a5-421f-8627-28a3b022537a.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2041.461290] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff3892e7-c5e3-49d7-8803-664100f20279 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.468388] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2041.468388] env[63371]: value = "task-1775200" [ 2041.468388] env[63371]: _type = "Task" [ 2041.468388] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.477091] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775200, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.631720] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775199, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.762212] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: c04edf6d-8a07-4776-be0f-b763fb3059d2] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2041.979099] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775200, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.132577] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775199, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.265426] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 9985dbcd-4498-4629-aae5-5e1933307c50] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2042.480120] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775200, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.633444] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775199, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.263483} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.633792] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6a997963-9627-47ab-bf68-1b38285cf7d9/6a997963-9627-47ab-bf68-1b38285cf7d9.vmdk to [datastore1] 3d2dabd1-5c4f-4997-843c-e1e124b687ba/3d2dabd1-5c4f-4997-843c-e1e124b687ba.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2042.634779] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa93af8d-b997-43fb-a14f-f513ee86a372 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.658768] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 3d2dabd1-5c4f-4997-843c-e1e124b687ba/3d2dabd1-5c4f-4997-843c-e1e124b687ba.vmdk or device None with type streamOptimized {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2042.659129] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6766bf8-f1fe-4501-ba83-206d9e5a5f6b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.678745] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2042.678745] env[63371]: value = "task-1775201" [ 2042.678745] env[63371]: _type = "Task" [ 2042.678745] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.686935] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775201, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.754674] env[63371]: DEBUG oslo_concurrency.lockutils [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "6df9af10-0053-4696-920a-10ab2af67ef5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2042.755020] env[63371]: DEBUG oslo_concurrency.lockutils [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "6df9af10-0053-4696-920a-10ab2af67ef5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2042.755241] env[63371]: DEBUG oslo_concurrency.lockutils [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "6df9af10-0053-4696-920a-10ab2af67ef5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2042.755426] env[63371]: DEBUG oslo_concurrency.lockutils [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "6df9af10-0053-4696-920a-10ab2af67ef5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2042.755593] env[63371]: DEBUG oslo_concurrency.lockutils [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "6df9af10-0053-4696-920a-10ab2af67ef5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2042.757937] env[63371]: INFO nova.compute.manager [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Terminating instance [ 2042.759683] env[63371]: DEBUG nova.compute.manager [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2042.759877] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2042.760725] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44158b9c-15f3-423e-b144-d1585dc2f56a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.767910] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2042.768194] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bcdc6cec-c2c0-408d-b8f7-3ae2596c5dda {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.769707] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 44a392e4-32c1-4aaf-8dc0-7df50c1a28c6] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2042.776155] env[63371]: DEBUG oslo_vmware.api [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 2042.776155] env[63371]: value = "task-1775202" [ 2042.776155] env[63371]: _type = "Task" [ 2042.776155] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.783518] env[63371]: DEBUG oslo_vmware.api [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775202, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.980721] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775200, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.276542} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2042.981048] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 8591c7e6-37a5-421f-8627-28a3b022537a/8591c7e6-37a5-421f-8627-28a3b022537a.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2042.981320] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2042.981990] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-527dc684-c166-4b64-8f81-fe7624effb58 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.987362] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2042.987362] env[63371]: value = "task-1775203" [ 2042.987362] env[63371]: _type = "Task" [ 2042.987362] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.994900] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775203, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.188676] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775201, 'name': ReconfigVM_Task, 'duration_secs': 0.362183} completed successfully. 
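The "Extending root virtual disk to 1048576" entry above appears to express the target size in KiB, the unit the vSphere extend-disk API uses for its newCapacityKb parameter, so 1048576 corresponds to a 1 GiB flavor root disk. A one-line check of that conversion, with root_gb assumed for the example rather than read from this log:

    root_gb = 1                               # assumed flavor root disk size in GiB
    requested_size_kb = root_gb * 1024 * 1024
    assert requested_size_kb == 1048576       # matches the value logged above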
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.188977] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 3d2dabd1-5c4f-4997-843c-e1e124b687ba/3d2dabd1-5c4f-4997-843c-e1e124b687ba.vmdk or device None with type streamOptimized {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2043.189649] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a65660a-86a7-4ad4-825f-ffc6647c2dfa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.195500] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2043.195500] env[63371]: value = "task-1775204" [ 2043.195500] env[63371]: _type = "Task" [ 2043.195500] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.203217] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775204, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.272961] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 0c9156ea-81c4-4286-a20b-66068a5bce59] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2043.285706] env[63371]: DEBUG oslo_vmware.api [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775202, 'name': PowerOffVM_Task, 'duration_secs': 0.232936} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.286028] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2043.286209] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2043.286446] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4967c6ef-c5a7-4794-b501-4916675faaab {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.352408] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2043.352628] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2043.352823] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleting the datastore file [datastore1] 6df9af10-0053-4696-920a-10ab2af67ef5 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2043.353101] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ec10f2d-336c-4666-8149-5774d8f0be1a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.358997] env[63371]: DEBUG oslo_vmware.api [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for the task: (returnval){ [ 2043.358997] env[63371]: value = "task-1775206" [ 2043.358997] env[63371]: _type = "Task" [ 2043.358997] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.366694] env[63371]: DEBUG oslo_vmware.api [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775206, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.497452] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775203, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.338639} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.497788] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2043.498649] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640be006-1b22-4bf4-8d5d-edb0ce044e6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.521435] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] 8591c7e6-37a5-421f-8627-28a3b022537a/8591c7e6-37a5-421f-8627-28a3b022537a.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2043.521725] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9bf74a4f-1db0-43be-b5aa-7819116cc375 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.542303] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2043.542303] env[63371]: value = "task-1775207" [ 2043.542303] env[63371]: _type = "Task" [ 2043.542303] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.549957] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775207, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.706928] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775204, 'name': Rename_Task, 'duration_secs': 0.130185} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.707149] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2043.707452] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f62ac548-2758-4f6c-9a1b-96faca983cd4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.713107] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2043.713107] env[63371]: value = "task-1775208" [ 2043.713107] env[63371]: _type = "Task" [ 2043.713107] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.720158] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775208, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.776999] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: c03e2dc4-75d9-4fbb-afc8-046cbbf908ac] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2043.868543] env[63371]: DEBUG oslo_vmware.api [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Task: {'id': task-1775206, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.369347} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.868814] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2043.869020] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2043.869204] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2043.869374] env[63371]: INFO nova.compute.manager [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Took 1.11 seconds to destroy the instance on the hypervisor. [ 2043.869967] env[63371]: DEBUG oslo.service.loopingcall [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2043.869967] env[63371]: DEBUG nova.compute.manager [-] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2043.869967] env[63371]: DEBUG nova.network.neutron [-] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2044.052930] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775207, 'name': ReconfigVM_Task} progress is 14%. 
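The oslo.service.loopingcall entry above ("Waiting for function ... _deallocate_network_with_retries to return") is the standard loopingcall idiom: the wrapped function is re-run on an interval until it raises LoopingCallDone. Nova's real retry helper is more involved; the sketch below only illustrates the idiom, with deallocate_network and TransientNeutronError as made-up stand-ins:

    from oslo_service import loopingcall

    class TransientNeutronError(Exception):      # hypothetical retryable failure
        pass

    def deallocate_network():                    # hypothetical stand-in for the Neutron call
        pass

    def _deallocate_with_retries():
        try:
            deallocate_network()
        except TransientNeutronError:
            return                               # keep looping; try again on the next interval
        raise loopingcall.LoopingCallDone()      # success: stop the loop

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=1).wait()               # blocks until LoopingCallDone is raised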
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.109535] env[63371]: DEBUG nova.compute.manager [req-6161d790-2d0a-4fe4-82cc-2f36f39d56e8 req-aecfa538-d74f-434b-8d80-712c6f3f836d service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Received event network-vif-deleted-a7788c55-6aa0-4056-b8d1-cff8ad8951f7 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2044.109753] env[63371]: INFO nova.compute.manager [req-6161d790-2d0a-4fe4-82cc-2f36f39d56e8 req-aecfa538-d74f-434b-8d80-712c6f3f836d service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Neutron deleted interface a7788c55-6aa0-4056-b8d1-cff8ad8951f7; detaching it from the instance and deleting it from the info cache [ 2044.109924] env[63371]: DEBUG nova.network.neutron [req-6161d790-2d0a-4fe4-82cc-2f36f39d56e8 req-aecfa538-d74f-434b-8d80-712c6f3f836d service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2044.223064] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775208, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.280288] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: d00602b9-16bf-4c11-bc47-6076dddbf159] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2044.554213] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775207, 'name': ReconfigVM_Task, 'duration_secs': 0.930507} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2044.554551] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Reconfigured VM instance instance-0000007b to attach disk [datastore1] 8591c7e6-37a5-421f-8627-28a3b022537a/8591c7e6-37a5-421f-8627-28a3b022537a.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2044.555322] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-00d3b247-db99-4759-9c4b-fe1a3c951c29 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.562459] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2044.562459] env[63371]: value = "task-1775209" [ 2044.562459] env[63371]: _type = "Task" [ 2044.562459] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.574394] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775209, 'name': Rename_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.588915] env[63371]: DEBUG nova.network.neutron [-] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2044.612655] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1b0dc99c-1cd8-42bb-88d3-d80cc1f33fa1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.621967] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ba1a49-e25b-4edc-b6b2-84c836d9aceb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.647606] env[63371]: DEBUG nova.compute.manager [req-6161d790-2d0a-4fe4-82cc-2f36f39d56e8 req-aecfa538-d74f-434b-8d80-712c6f3f836d service nova] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Detach interface failed, port_id=a7788c55-6aa0-4056-b8d1-cff8ad8951f7, reason: Instance 6df9af10-0053-4696-920a-10ab2af67ef5 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2044.723145] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775208, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.783397] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 9862b0f0-ccf6-4e69-9e78-cf864adaa65e] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2045.073008] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775209, 'name': Rename_Task, 'duration_secs': 0.141714} completed successfully. 
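The network-vif-deleted handling above amounts to filtering the deleted port out of the cached VIF list; for this single-NIC instance that leaves the empty network_info written back to the cache, and the later explicit detach fails harmlessly because the instance is already gone. A minimal sketch of that filter (the starting list is a trimmed placeholder, not the real cache contents):

    deleted_port_id = 'a7788c55-6aa0-4056-b8d1-cff8ad8951f7'        # from the event above

    cached_vifs = [{'id': 'a7788c55-6aa0-4056-b8d1-cff8ad8951f7'}]  # trimmed single-NIC cache entry
    cached_vifs = [vif for vif in cached_vifs if vif['id'] != deleted_port_id]
    print(cached_vifs)                                              # -> [], as in the cache update above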
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.073304] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2045.073546] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90e32bac-2fda-47f9-9bfb-b460f7b4fc05 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.080135] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2045.080135] env[63371]: value = "task-1775210" [ 2045.080135] env[63371]: _type = "Task" [ 2045.080135] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.087276] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775210, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.092810] env[63371]: INFO nova.compute.manager [-] [instance: 6df9af10-0053-4696-920a-10ab2af67ef5] Took 1.22 seconds to deallocate network for instance. [ 2045.223589] env[63371]: DEBUG oslo_vmware.api [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775208, 'name': PowerOnVM_Task, 'duration_secs': 1.032549} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.223804] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2045.286794] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 88fd17e6-bf88-4f7d-a204-6fb0f5c7e9ec] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2045.321946] env[63371]: DEBUG nova.compute.manager [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2045.322941] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b851d2ad-9351-49a3-b94e-ec7df1448a87 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.592092] env[63371]: DEBUG oslo_vmware.api [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775210, 'name': PowerOnVM_Task, 'duration_secs': 0.457317} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.592383] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2045.592592] env[63371]: INFO nova.compute.manager [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Took 8.37 seconds to spawn the instance on the hypervisor. 
[ 2045.592790] env[63371]: DEBUG nova.compute.manager [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2045.593600] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5627a1-f0ad-4a55-bbca-d8e461466720 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.600515] env[63371]: DEBUG oslo_concurrency.lockutils [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2045.600762] env[63371]: DEBUG oslo_concurrency.lockutils [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2045.600977] env[63371]: DEBUG nova.objects.instance [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lazy-loading 'resources' on Instance uuid 6df9af10-0053-4696-920a-10ab2af67ef5 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2045.792235] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: b523486c-adae-4322-80be-1f3bf33ca192] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2045.842290] env[63371]: DEBUG oslo_concurrency.lockutils [None req-17ee2b28-f25c-4131-876a-f80bc73be16a tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 22.565s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.115903] env[63371]: INFO nova.compute.manager [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Took 13.14 seconds to build instance. 
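The compute_resources lock lines above, and the per-image cache locks earlier in this section, are oslo.concurrency's lockutils at work: the "Acquiring/Acquired/Releasing" triples come from its lock() context manager, while the "acquired by ... waited"/"held" pairs come from its synchronized decorator. A minimal sketch of both forms, with placeholder bodies:

    from oslo_concurrency import lockutils

    # Context-manager form, as used for the per-image cache locks in this section:
    with lockutils.lock('[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9'):
        pass   # touch the cached image only while the lock is held

    # Decorator form, which emits the 'acquired by ... waited'/'held' lines seen above:
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass   # resource-tracker style update, serialized within this process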
[ 2046.172130] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53adadff-0630-4991-bf95-41554c10cdd5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.179451] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28789be2-2b75-4503-a680-7bb7f2d89d52 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.210321] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92d4e5e-3a9c-4ccd-9ce2-5f09c84bfb75 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.217602] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98e35d4b-92a4-4550-bb1e-af1aa98d169d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.229943] env[63371]: DEBUG nova.compute.provider_tree [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2046.293735] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 44cc8606-24f5-4f6b-b96f-3559c9c3f06e] Instance has had 0 of 5 cleanup attempts {{(pid=63371) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11242}} [ 2046.502236] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2046.502492] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2046.502726] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2046.502915] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2046.503103] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.505267] env[63371]: INFO nova.compute.manager [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Terminating instance [ 2046.507294] env[63371]: DEBUG nova.compute.manager [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2046.507511] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2046.508342] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c47e5e-f56b-497f-be6b-08d64dfe3342 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.516590] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2046.516843] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-636cc4fc-f4c6-43e6-9191-fb0e20b9b71a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.522936] env[63371]: DEBUG oslo_vmware.api [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2046.522936] env[63371]: value = "task-1775211" [ 2046.522936] env[63371]: _type = "Task" [ 2046.522936] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.530416] env[63371]: DEBUG oslo_vmware.api [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775211, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.617711] env[63371]: DEBUG oslo_concurrency.lockutils [None req-fa6202d6-44a2-4378-bdee-6d428ff7f098 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "8591c7e6-37a5-421f-8627-28a3b022537a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.645s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.732995] env[63371]: DEBUG nova.scheduler.client.report [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2046.797442] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2046.797698] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Cleaning up deleted instances with incomplete migration {{(pid=63371) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11267}} [ 2047.033032] env[63371]: DEBUG oslo_vmware.api [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775211, 'name': PowerOffVM_Task, 'duration_secs': 0.193331} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.033276] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2047.033450] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2047.033692] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7b40940-895e-41fc-bc8f-ce984bcda922 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.112085] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2047.112325] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2047.112508] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleting the datastore file [datastore1] 3d2dabd1-5c4f-4997-843c-e1e124b687ba {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2047.112782] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c124ee0-1726-4257-a465-8610bcee03b3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.119162] env[63371]: DEBUG oslo_vmware.api [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for the task: (returnval){ [ 2047.119162] env[63371]: value = "task-1775213" [ 2047.119162] env[63371]: _type = "Task" [ 2047.119162] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.131020] env[63371]: DEBUG oslo_vmware.api [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775213, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.238177] env[63371]: DEBUG oslo_concurrency.lockutils [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.637s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2047.257506] env[63371]: INFO nova.scheduler.client.report [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Deleted allocations for instance 6df9af10-0053-4696-920a-10ab2af67ef5 [ 2047.275927] env[63371]: DEBUG nova.compute.manager [req-66459111-4164-4319-b7c6-b579e32280d1 req-61ffe8e7-34ef-441a-a180-28c2b82b15b3 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Received event network-changed-fd85bc34-1186-464a-b7f2-5c62353373fb {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2047.276220] env[63371]: DEBUG nova.compute.manager [req-66459111-4164-4319-b7c6-b579e32280d1 req-61ffe8e7-34ef-441a-a180-28c2b82b15b3 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Refreshing instance network info cache due to event network-changed-fd85bc34-1186-464a-b7f2-5c62353373fb. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2047.276735] env[63371]: DEBUG oslo_concurrency.lockutils [req-66459111-4164-4319-b7c6-b579e32280d1 req-61ffe8e7-34ef-441a-a180-28c2b82b15b3 service nova] Acquiring lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2047.276735] env[63371]: DEBUG oslo_concurrency.lockutils [req-66459111-4164-4319-b7c6-b579e32280d1 req-61ffe8e7-34ef-441a-a180-28c2b82b15b3 service nova] Acquired lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2047.277411] env[63371]: DEBUG nova.network.neutron [req-66459111-4164-4319-b7c6-b579e32280d1 req-61ffe8e7-34ef-441a-a180-28c2b82b15b3 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Refreshing network info cache for port fd85bc34-1186-464a-b7f2-5c62353373fb {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2047.299944] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2047.629301] env[63371]: DEBUG oslo_vmware.api [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Task: {'id': task-1775213, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15688} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2047.629550] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2047.629696] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2047.629862] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2047.630077] env[63371]: INFO nova.compute.manager [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2047.630351] env[63371]: DEBUG oslo.service.loopingcall [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2047.630553] env[63371]: DEBUG nova.compute.manager [-] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2047.630645] env[63371]: DEBUG nova.network.neutron [-] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2047.765473] env[63371]: DEBUG oslo_concurrency.lockutils [None req-803736c9-eba0-4a7e-8399-4b17b232a577 tempest-ServerActionsTestOtherA-1526347176 tempest-ServerActionsTestOtherA-1526347176-project-member] Lock "6df9af10-0053-4696-920a-10ab2af67ef5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.010s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2048.147051] env[63371]: DEBUG nova.network.neutron [req-66459111-4164-4319-b7c6-b579e32280d1 req-61ffe8e7-34ef-441a-a180-28c2b82b15b3 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Updated VIF entry in instance network info cache for port fd85bc34-1186-464a-b7f2-5c62353373fb. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2048.147485] env[63371]: DEBUG nova.network.neutron [req-66459111-4164-4319-b7c6-b579e32280d1 req-61ffe8e7-34ef-441a-a180-28c2b82b15b3 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Updating instance_info_cache with network_info: [{"id": "fd85bc34-1186-464a-b7f2-5c62353373fb", "address": "fa:16:3e:12:88:f0", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd85bc34-11", "ovs_interfaceid": "fd85bc34-1186-464a-b7f2-5c62353373fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2048.219938] env[63371]: DEBUG nova.compute.manager [req-d6962020-a25c-4558-80a3-484b74f62428 req-a7138fd8-0899-4a61-bcf4-9e226a7e5437 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Received event network-vif-deleted-a21caeee-a9c4-4ead-8c4e-4dc84446b5b4 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2048.220236] env[63371]: INFO nova.compute.manager [req-d6962020-a25c-4558-80a3-484b74f62428 req-a7138fd8-0899-4a61-bcf4-9e226a7e5437 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Neutron deleted interface a21caeee-a9c4-4ead-8c4e-4dc84446b5b4; detaching it from the instance and deleting it from the info cache [ 2048.220416] env[63371]: DEBUG nova.network.neutron [req-d6962020-a25c-4558-80a3-484b74f62428 req-a7138fd8-0899-4a61-bcf4-9e226a7e5437 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2048.650689] env[63371]: DEBUG oslo_concurrency.lockutils [req-66459111-4164-4319-b7c6-b579e32280d1 req-61ffe8e7-34ef-441a-a180-28c2b82b15b3 service nova] Releasing lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2048.690326] env[63371]: DEBUG nova.network.neutron [-] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2048.723319] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5573011-a4be-4f72-bde1-a865fa0e39da {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.733297] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9a98746-f613-4574-97f5-240352d8eacb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.757914] env[63371]: DEBUG nova.compute.manager [req-d6962020-a25c-4558-80a3-484b74f62428 req-a7138fd8-0899-4a61-bcf4-9e226a7e5437 service nova] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Detach interface failed, port_id=a21caeee-a9c4-4ead-8c4e-4dc84446b5b4, reason: Instance 3d2dabd1-5c4f-4997-843c-e1e124b687ba could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2048.801694] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2048.801694] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2048.801822] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2049.195189] env[63371]: INFO nova.compute.manager [-] [instance: 3d2dabd1-5c4f-4997-843c-e1e124b687ba] Took 1.56 seconds to deallocate network for instance. 
[ 2049.701181] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2049.701519] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2049.701746] env[63371]: DEBUG nova.objects.instance [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lazy-loading 'resources' on Instance uuid 3d2dabd1-5c4f-4997-843c-e1e124b687ba {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2050.261902] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b3c0ea-dedc-4783-9b44-7ed03bbf8a13 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.270510] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49882d25-46f8-4a98-9450-38deb2c6d574 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.300599] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3602751f-93a0-43fc-be91-e4fea0a0debb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.308227] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2069dc2c-279b-47fd-a58c-d26c79d307ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.323602] env[63371]: DEBUG nova.compute.provider_tree [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2050.826733] env[63371]: DEBUG nova.scheduler.client.report [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2051.330950] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 
tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.629s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.352837] env[63371]: INFO nova.scheduler.client.report [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Deleted allocations for instance 3d2dabd1-5c4f-4997-843c-e1e124b687ba [ 2051.860664] env[63371]: DEBUG oslo_concurrency.lockutils [None req-2e7ba6e0-5cc4-402d-b912-1e139cf8e175 tempest-ServerActionsTestOtherB-610614522 tempest-ServerActionsTestOtherB-610614522-project-member] Lock "3d2dabd1-5c4f-4997-843c-e1e124b687ba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.358s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2053.330178] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Didn't find any instances for network info cache update. {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10015}} [ 2053.330521] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2053.330772] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2053.331041] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2073.537723] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2073.538118] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2074.041156] env[63371]: INFO nova.compute.manager [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Detaching volume 44fef38f-3bfe-4eb9-814e-26572a81abc3 [ 2074.071289] env[63371]: INFO nova.virt.block_device [None req-93939128-33b8-4647-8380-bc4c0497d9bb 
tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Attempting to driver detach volume 44fef38f-3bfe-4eb9-814e-26572a81abc3 from mountpoint /dev/sdb [ 2074.071669] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Volume detach. Driver type: vmdk {{(pid=63371) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2074.071980] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368525', 'volume_id': '44fef38f-3bfe-4eb9-814e-26572a81abc3', 'name': 'volume-44fef38f-3bfe-4eb9-814e-26572a81abc3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d', 'attached_at': '', 'detached_at': '', 'volume_id': '44fef38f-3bfe-4eb9-814e-26572a81abc3', 'serial': '44fef38f-3bfe-4eb9-814e-26572a81abc3'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2074.073317] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c44bc7-6a4f-484a-8fd6-631bbdfa6042 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.104147] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927da192-aff5-4e2c-85c0-727fffeadb8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.110545] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45658c99-17c3-4592-84e5-9de00ed35b32 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.131266] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44094188-2513-481e-b255-6c4a43a5a88e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.145187] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] The volume has not been displaced from its original location: [datastore1] volume-44fef38f-3bfe-4eb9-814e-26572a81abc3/volume-44fef38f-3bfe-4eb9-814e-26572a81abc3.vmdk. No consolidation needed. 
{{(pid=63371) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2074.150261] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Reconfiguring VM instance instance-00000077 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2074.150512] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96e8bcbc-0d09-4892-b6f1-cbd690aed6c6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.170288] env[63371]: DEBUG oslo_vmware.api [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2074.170288] env[63371]: value = "task-1775215" [ 2074.170288] env[63371]: _type = "Task" [ 2074.170288] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2074.178955] env[63371]: DEBUG oslo_vmware.api [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775215, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.679690] env[63371]: DEBUG oslo_vmware.api [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775215, 'name': ReconfigVM_Task, 'duration_secs': 0.222445} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2074.680035] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Reconfigured VM instance instance-00000077 to detach disk 2001 {{(pid=63371) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2074.684589] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7a2a8c2-306a-4248-bd59-1f288981d474 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.700013] env[63371]: DEBUG oslo_vmware.api [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2074.700013] env[63371]: value = "task-1775216" [ 2074.700013] env[63371]: _type = "Task" [ 2074.700013] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2074.707345] env[63371]: DEBUG oslo_vmware.api [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775216, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.209887] env[63371]: DEBUG oslo_vmware.api [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775216, 'name': ReconfigVM_Task, 'duration_secs': 0.147747} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.210196] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-368525', 'volume_id': '44fef38f-3bfe-4eb9-814e-26572a81abc3', 'name': 'volume-44fef38f-3bfe-4eb9-814e-26572a81abc3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'attached', 'instance': '9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d', 'attached_at': '', 'detached_at': '', 'volume_id': '44fef38f-3bfe-4eb9-814e-26572a81abc3', 'serial': '44fef38f-3bfe-4eb9-814e-26572a81abc3'} {{(pid=63371) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2075.749079] env[63371]: DEBUG nova.objects.instance [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lazy-loading 'flavor' on Instance uuid 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2076.758037] env[63371]: DEBUG oslo_concurrency.lockutils [None req-93939128-33b8-4647-8380-bc4c0497d9bb tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.220s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2077.773699] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.773975] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.774213] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.774752] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.774942] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2077.777029] env[63371]: INFO nova.compute.manager [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Terminating instance [ 2077.778635] env[63371]: DEBUG nova.compute.manager [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2077.778875] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2077.779722] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298d90f5-52d1-4ff9-b6ce-ad8326f5a17a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.787329] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2077.787527] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91a43214-518b-4481-bd84-05f62f267eb2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.796020] env[63371]: DEBUG oslo_vmware.api [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2077.796020] env[63371]: value = "task-1775217" [ 2077.796020] env[63371]: _type = "Task" [ 2077.796020] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.802728] env[63371]: DEBUG oslo_vmware.api [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775217, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.304034] env[63371]: DEBUG oslo_vmware.api [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775217, 'name': PowerOffVM_Task, 'duration_secs': 0.170261} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.304034] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2078.304034] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2078.304277] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-345763e4-cc70-4039-9440-e88f09c628ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.382437] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2078.382649] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2078.382834] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleting the datastore file [datastore1] 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2078.383124] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b6ec9311-defb-4608-870f-73d0cbfd402f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.389970] env[63371]: DEBUG oslo_vmware.api [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2078.389970] 
env[63371]: value = "task-1775219" [ 2078.389970] env[63371]: _type = "Task" [ 2078.389970] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.397535] env[63371]: DEBUG oslo_vmware.api [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775219, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.900203] env[63371]: DEBUG oslo_vmware.api [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775219, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142762} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.900612] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2078.900612] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2078.900763] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2078.900936] env[63371]: INFO nova.compute.manager [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2078.901196] env[63371]: DEBUG oslo.service.loopingcall [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2078.901382] env[63371]: DEBUG nova.compute.manager [-] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2078.901478] env[63371]: DEBUG nova.network.neutron [-] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2079.320511] env[63371]: DEBUG nova.compute.manager [req-0baf3876-3c69-496e-a792-e1cfec1d2ad3 req-ab60405a-d2a4-4f0c-8df8-0d9d31a6d83d service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Received event network-vif-deleted-14d87d33-0ac4-480f-b86e-c9e13b3e3e4e {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2079.320511] env[63371]: INFO nova.compute.manager [req-0baf3876-3c69-496e-a792-e1cfec1d2ad3 req-ab60405a-d2a4-4f0c-8df8-0d9d31a6d83d service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Neutron deleted interface 14d87d33-0ac4-480f-b86e-c9e13b3e3e4e; detaching it from the instance and deleting it from the info cache [ 2079.320734] env[63371]: DEBUG nova.network.neutron [req-0baf3876-3c69-496e-a792-e1cfec1d2ad3 req-ab60405a-d2a4-4f0c-8df8-0d9d31a6d83d service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2079.792831] env[63371]: DEBUG nova.network.neutron [-] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2079.822612] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-76634239-35c3-478d-9779-ca0253f30d23 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.831616] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7035ccfa-d0df-4124-8218-cbef062d052b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.856008] env[63371]: DEBUG nova.compute.manager [req-0baf3876-3c69-496e-a792-e1cfec1d2ad3 req-ab60405a-d2a4-4f0c-8df8-0d9d31a6d83d service nova] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Detach interface failed, port_id=14d87d33-0ac4-480f-b86e-c9e13b3e3e4e, reason: Instance 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2080.295738] env[63371]: INFO nova.compute.manager [-] [instance: 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d] Took 1.39 seconds to deallocate network for instance. 
[ 2080.430752] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2080.802591] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2080.802872] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2080.803108] env[63371]: DEBUG nova.objects.instance [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lazy-loading 'resources' on Instance uuid 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2081.430255] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.430494] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.430620] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2081.430776] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2081.435274] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e36ab8cf-e29b-419b-87e6-5501f81f7af4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.444614] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e4f77d-ccf7-4c57-84f1-f8cdf1e849c5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.473714] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffc411d-da38-4a65-9da7-cbc20c7e75f9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.480444] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ada044-6b22-450a-b2fe-5e1312b61d90 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.493232] env[63371]: DEBUG nova.compute.provider_tree [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2081.934204] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.996450] env[63371]: DEBUG nova.scheduler.client.report [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2082.502196] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.699s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2082.504570] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired 
by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.571s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2082.504754] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2082.504910] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2082.505878] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd397d0-a49a-4e71-bc84-dd978feb932d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.514233] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728a4b31-eb96-4f38-a7a5-3ed0d574ee57 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.529813] env[63371]: INFO nova.scheduler.client.report [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleted allocations for instance 9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d [ 2082.531208] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a0f194-8646-4b56-a7df-90215a9cffcf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.539747] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59dd2ec9-513b-4a4c-be9e-65e6e39dc81c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.569673] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180995MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2082.569857] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2082.570016] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2083.039046] env[63371]: DEBUG oslo_concurrency.lockutils [None req-cc945a11-e1bd-4bb1-885e-289575941fc9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock 
"9f2ae444-4dd8-45cc-b8c7-0a04f2f62b0d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.265s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2083.598237] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 8591c7e6-37a5-421f-8627-28a3b022537a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2083.598237] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2083.598237] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2083.624072] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68400897-24d1-43bd-a370-76d089c0ab18 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.632011] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8b6e6a-b554-45e3-b1b2-31a34be654ba {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.663439] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b409f9c1-e794-445b-ba51-5da1a28bdad7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.670894] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c21b49c-b38e-4946-8426-d496e29c9391 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.684291] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2084.187405] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2084.630416] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 
tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "96098783-edc0-4ce6-866f-bc17a9961aa5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.630675] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.674675] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c577990a-6c73-4132-9f9c-3b9ea4fc92b8 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "8591c7e6-37a5-421f-8627-28a3b022537a" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2084.674902] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c577990a-6c73-4132-9f9c-3b9ea4fc92b8 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "8591c7e6-37a5-421f-8627-28a3b022537a" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.675094] env[63371]: DEBUG nova.compute.manager [None req-c577990a-6c73-4132-9f9c-3b9ea4fc92b8 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2084.675981] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12137bdd-6390-466c-a6a1-0f5233334edb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.682984] env[63371]: DEBUG nova.compute.manager [None req-c577990a-6c73-4132-9f9c-3b9ea4fc92b8 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63371) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3369}} [ 2084.683534] env[63371]: DEBUG nova.objects.instance [None req-c577990a-6c73-4132-9f9c-3b9ea4fc92b8 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lazy-loading 'flavor' on Instance uuid 8591c7e6-37a5-421f-8627-28a3b022537a {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2084.695114] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2084.695315] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.125s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.133364] env[63371]: DEBUG nova.compute.manager [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Starting instance... {{(pid=63371) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2433}} [ 2085.187418] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c577990a-6c73-4132-9f9c-3b9ea4fc92b8 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2085.187737] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1bc74bc-0746-4e22-8594-d93db5600a97 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.196009] env[63371]: DEBUG oslo_vmware.api [None req-c577990a-6c73-4132-9f9c-3b9ea4fc92b8 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2085.196009] env[63371]: value = "task-1775220" [ 2085.196009] env[63371]: _type = "Task" [ 2085.196009] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.203749] env[63371]: DEBUG oslo_vmware.api [None req-c577990a-6c73-4132-9f9c-3b9ea4fc92b8 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775220, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.653892] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.654182] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.655783] env[63371]: INFO nova.compute.claims [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2085.695992] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2085.696250] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2085.696350] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2085.696480] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2085.707032] env[63371]: DEBUG oslo_vmware.api [None req-c577990a-6c73-4132-9f9c-3b9ea4fc92b8 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775220, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.202296] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Skipping network cache update for instance because it is Building. 
{{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9942}} [ 2086.202520] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2086.202649] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquired lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2086.202783] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Forcefully refreshing network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2086.202921] env[63371]: DEBUG nova.objects.instance [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lazy-loading 'info_cache' on Instance uuid 8591c7e6-37a5-421f-8627-28a3b022537a {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2086.209442] env[63371]: DEBUG oslo_vmware.api [None req-c577990a-6c73-4132-9f9c-3b9ea4fc92b8 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775220, 'name': PowerOffVM_Task, 'duration_secs': 0.546402} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.209719] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-c577990a-6c73-4132-9f9c-3b9ea4fc92b8 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2086.209894] env[63371]: DEBUG nova.compute.manager [None req-c577990a-6c73-4132-9f9c-3b9ea4fc92b8 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2086.210664] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c466d37-8e3d-4622-b0ae-383224524fcc {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.701295] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d86a2a1-f923-46a8-aed2-ba1cc795854d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.710796] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e56c3373-9765-40be-b8bb-259cc71e8083 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.743022] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e691f732-4d2f-4537-ae41-d9e69b9c1e8b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.745754] env[63371]: DEBUG oslo_concurrency.lockutils [None req-c577990a-6c73-4132-9f9c-3b9ea4fc92b8 
tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "8591c7e6-37a5-421f-8627-28a3b022537a" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.071s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.751626] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83052b14-23b7-4dd3-ae61-0791ca8be5bd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.766324] env[63371]: DEBUG nova.compute.provider_tree [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2087.069885] env[63371]: DEBUG nova.objects.instance [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lazy-loading 'flavor' on Instance uuid 8591c7e6-37a5-421f-8627-28a3b022537a {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2087.268647] env[63371]: DEBUG nova.scheduler.client.report [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2087.575630] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2087.772982] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.119s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.773574] env[63371]: DEBUG nova.compute.manager [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Start building networks asynchronously for instance. 
{{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2830}} [ 2087.921070] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Updating instance_info_cache with network_info: [{"id": "fd85bc34-1186-464a-b7f2-5c62353373fb", "address": "fa:16:3e:12:88:f0", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd85bc34-11", "ovs_interfaceid": "fd85bc34-1186-464a-b7f2-5c62353373fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.279053] env[63371]: DEBUG nova.compute.utils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Using /dev/sd instead of None {{(pid=63371) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2088.280556] env[63371]: DEBUG nova.compute.manager [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Allocating IP information in the background. 
{{(pid=63371) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1982}} [ 2088.280735] env[63371]: DEBUG nova.network.neutron [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] allocate_for_instance() {{(pid=63371) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2088.319292] env[63371]: DEBUG nova.policy [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0af72e7f1e644797b480011450d02e02', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da713632f95146f1986c0d8a9e529ca0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63371) authorize /opt/stack/nova/nova/policy.py:201}} [ 2088.424606] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Releasing lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2088.425453] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Updated the network info_cache for instance {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2088.425453] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2088.425453] env[63371]: DEBUG nova.network.neutron [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2088.425453] env[63371]: DEBUG nova.objects.instance [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lazy-loading 'info_cache' on Instance uuid 8591c7e6-37a5-421f-8627-28a3b022537a {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2088.426979] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2088.426979] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2088.427121] env[63371]: DEBUG oslo_service.periodic_task [None 
req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2088.574079] env[63371]: DEBUG nova.network.neutron [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Successfully created port: 487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2088.783537] env[63371]: DEBUG nova.compute.manager [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Start building block device mappings for instance. {{(pid=63371) _build_resources /opt/stack/nova/nova/compute/manager.py:2865}} [ 2088.928948] env[63371]: DEBUG nova.objects.base [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Object Instance<8591c7e6-37a5-421f-8627-28a3b022537a> lazy-loaded attributes: flavor,info_cache {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2089.631352] env[63371]: DEBUG nova.network.neutron [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Updating instance_info_cache with network_info: [{"id": "fd85bc34-1186-464a-b7f2-5c62353373fb", "address": "fa:16:3e:12:88:f0", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd85bc34-11", "ovs_interfaceid": "fd85bc34-1186-464a-b7f2-5c62353373fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2089.797765] env[63371]: DEBUG nova.compute.manager [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Start spawning the instance on the hypervisor. 
{{(pid=63371) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2639}} [ 2089.822707] env[63371]: DEBUG nova.virt.hardware [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-11T21:16:28Z,direct_url=,disk_format='vmdk',id=1aeb47a7-4e18-481d-b3c0-d33e8c7839d9,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='494d32be2d74438a81d240ce9a488f98',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-11T21:16:28Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2089.822953] env[63371]: DEBUG nova.virt.hardware [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2089.823123] env[63371]: DEBUG nova.virt.hardware [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2089.823314] env[63371]: DEBUG nova.virt.hardware [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2089.823477] env[63371]: DEBUG nova.virt.hardware [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2089.823639] env[63371]: DEBUG nova.virt.hardware [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2089.823842] env[63371]: DEBUG nova.virt.hardware [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2089.824012] env[63371]: DEBUG nova.virt.hardware [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 2089.824195] env[63371]: DEBUG nova.virt.hardware [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2089.824356] env[63371]: DEBUG nova.virt.hardware [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2089.824525] env[63371]: DEBUG nova.virt.hardware [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2089.825405] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f481235-e038-4909-a772-c6a26c6d444b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.833388] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3040bd53-c7b1-4935-a09f-383c4b811997 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.133920] env[63371]: DEBUG oslo_concurrency.lockutils [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2090.192855] env[63371]: DEBUG nova.compute.manager [req-236f8e71-169b-410d-886e-312abdd666a1 req-3a842836-bada-45c9-aac3-52fd7cb25ccd service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Received event network-vif-plugged-487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2090.193094] env[63371]: DEBUG oslo_concurrency.lockutils [req-236f8e71-169b-410d-886e-312abdd666a1 req-3a842836-bada-45c9-aac3-52fd7cb25ccd service nova] Acquiring lock "96098783-edc0-4ce6-866f-bc17a9961aa5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2090.193304] env[63371]: DEBUG oslo_concurrency.lockutils [req-236f8e71-169b-410d-886e-312abdd666a1 req-3a842836-bada-45c9-aac3-52fd7cb25ccd service nova] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2090.193476] env[63371]: DEBUG oslo_concurrency.lockutils [req-236f8e71-169b-410d-886e-312abdd666a1 req-3a842836-bada-45c9-aac3-52fd7cb25ccd service nova] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.193646] env[63371]: DEBUG nova.compute.manager [req-236f8e71-169b-410d-886e-312abdd666a1 req-3a842836-bada-45c9-aac3-52fd7cb25ccd service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] No waiting events found dispatching network-vif-plugged-487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2090.193823] env[63371]: WARNING nova.compute.manager [req-236f8e71-169b-410d-886e-312abdd666a1 req-3a842836-bada-45c9-aac3-52fd7cb25ccd service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Received unexpected event network-vif-plugged-487877ba-9a41-4afc-81e7-07dfbf50f256 for instance with vm_state building and task_state spawning. [ 2090.273424] env[63371]: DEBUG nova.network.neutron [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Successfully updated port: 487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2090.637404] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2090.637736] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a96e2f06-4d0f-4911-9778-fb5e54946bc9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.645216] env[63371]: DEBUG oslo_vmware.api [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2090.645216] env[63371]: value = "task-1775221" [ 2090.645216] env[63371]: _type = "Task" [ 2090.645216] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.653037] env[63371]: DEBUG oslo_vmware.api [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775221, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.776635] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2090.776806] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2090.776943] env[63371]: DEBUG nova.network.neutron [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2091.155495] env[63371]: DEBUG oslo_vmware.api [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775221, 'name': PowerOnVM_Task, 'duration_secs': 0.407486} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.155973] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2091.155973] env[63371]: DEBUG nova.compute.manager [None req-58e7d8cb-0d11-489c-bc48-3798ba6c2d40 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2091.156752] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa041f2c-921e-433b-8ded-a7157e9a7d65 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.310291] env[63371]: DEBUG nova.network.neutron [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Instance cache missing network info. 
{{(pid=63371) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2091.432766] env[63371]: DEBUG nova.network.neutron [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updating instance_info_cache with network_info: [{"id": "487877ba-9a41-4afc-81e7-07dfbf50f256", "address": "fa:16:3e:f0:41:09", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap487877ba-9a", "ovs_interfaceid": "487877ba-9a41-4afc-81e7-07dfbf50f256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2091.935540] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2091.935851] env[63371]: DEBUG nova.compute.manager [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Instance network_info: |[{"id": "487877ba-9a41-4afc-81e7-07dfbf50f256", "address": "fa:16:3e:f0:41:09", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap487877ba-9a", "ovs_interfaceid": "487877ba-9a41-4afc-81e7-07dfbf50f256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63371) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1997}} [ 2091.936312] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:41:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '487877ba-9a41-4afc-81e7-07dfbf50f256', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2091.945074] env[63371]: DEBUG oslo.service.loopingcall [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2091.945632] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2091.945868] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78ba595e-99ef-4bcf-8f85-74014c7d39ec {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.965960] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2091.965960] env[63371]: value = "task-1775222" [ 2091.965960] env[63371]: _type = "Task" [ 2091.965960] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.973896] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775222, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.221550] env[63371]: DEBUG nova.compute.manager [req-cf6ab455-7940-470c-ab0a-a73777f5a70c req-3909bcda-d2b0-4aa9-9c9a-babb92c02ee2 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Received event network-changed-487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2092.221794] env[63371]: DEBUG nova.compute.manager [req-cf6ab455-7940-470c-ab0a-a73777f5a70c req-3909bcda-d2b0-4aa9-9c9a-babb92c02ee2 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Refreshing instance network info cache due to event network-changed-487877ba-9a41-4afc-81e7-07dfbf50f256. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2092.221925] env[63371]: DEBUG oslo_concurrency.lockutils [req-cf6ab455-7940-470c-ab0a-a73777f5a70c req-3909bcda-d2b0-4aa9-9c9a-babb92c02ee2 service nova] Acquiring lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2092.222083] env[63371]: DEBUG oslo_concurrency.lockutils [req-cf6ab455-7940-470c-ab0a-a73777f5a70c req-3909bcda-d2b0-4aa9-9c9a-babb92c02ee2 service nova] Acquired lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2092.222247] env[63371]: DEBUG nova.network.neutron [req-cf6ab455-7940-470c-ab0a-a73777f5a70c req-3909bcda-d2b0-4aa9-9c9a-babb92c02ee2 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Refreshing network info cache for port 487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2092.476695] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775222, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.916209] env[63371]: DEBUG nova.network.neutron [req-cf6ab455-7940-470c-ab0a-a73777f5a70c req-3909bcda-d2b0-4aa9-9c9a-babb92c02ee2 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updated VIF entry in instance network info cache for port 487877ba-9a41-4afc-81e7-07dfbf50f256. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2092.916607] env[63371]: DEBUG nova.network.neutron [req-cf6ab455-7940-470c-ab0a-a73777f5a70c req-3909bcda-d2b0-4aa9-9c9a-babb92c02ee2 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updating instance_info_cache with network_info: [{"id": "487877ba-9a41-4afc-81e7-07dfbf50f256", "address": "fa:16:3e:f0:41:09", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap487877ba-9a", "ovs_interfaceid": "487877ba-9a41-4afc-81e7-07dfbf50f256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2092.977456] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775222, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.155825] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2093.331746] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c176dd-a266-4829-813c-383577cf733c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.338750] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-36ff483c-1cf5-407d-b6ff-0ff163f1546a tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Suspending the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2093.338980] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-e2661e69-432f-402e-bffa-bcad7bee5d2f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.345768] env[63371]: DEBUG oslo_vmware.api [None req-36ff483c-1cf5-407d-b6ff-0ff163f1546a tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2093.345768] env[63371]: value = "task-1775223" [ 2093.345768] env[63371]: _type = "Task" [ 2093.345768] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.354235] env[63371]: DEBUG oslo_vmware.api [None req-36ff483c-1cf5-407d-b6ff-0ff163f1546a tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775223, 'name': SuspendVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.420122] env[63371]: DEBUG oslo_concurrency.lockutils [req-cf6ab455-7940-470c-ab0a-a73777f5a70c req-3909bcda-d2b0-4aa9-9c9a-babb92c02ee2 service nova] Releasing lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2093.477747] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775222, 'name': CreateVM_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.855958] env[63371]: DEBUG oslo_vmware.api [None req-36ff483c-1cf5-407d-b6ff-0ff163f1546a tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775223, 'name': SuspendVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.978400] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775222, 'name': CreateVM_Task, 'duration_secs': 1.840276} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.978575] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2093.979347] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2093.979552] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2093.979918] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2093.980269] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3189cb3a-a4da-4fbe-88e2-a96f1c3d3487 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.984997] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2093.984997] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f4e6cd-78eb-137f-21d6-8929c40e87a8" [ 2093.984997] env[63371]: _type = "Task" [ 2093.984997] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.997093] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f4e6cd-78eb-137f-21d6-8929c40e87a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.356513] env[63371]: DEBUG oslo_vmware.api [None req-36ff483c-1cf5-407d-b6ff-0ff163f1546a tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775223, 'name': SuspendVM_Task, 'duration_secs': 0.680405} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.356818] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-36ff483c-1cf5-407d-b6ff-0ff163f1546a tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Suspended the VM {{(pid=63371) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2094.356951] env[63371]: DEBUG nova.compute.manager [None req-36ff483c-1cf5-407d-b6ff-0ff163f1546a tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2094.357692] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e012029-dafa-4c38-9987-2ceecb2b3aa2 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.496316] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f4e6cd-78eb-137f-21d6-8929c40e87a8, 'name': SearchDatastore_Task, 'duration_secs': 0.013924} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.496632] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2094.496897] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Processing image 1aeb47a7-4e18-481d-b3c0-d33e8c7839d9 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2094.497100] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2094.497248] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2094.497422] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2094.497674] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7618e041-c49e-45ca-b13e-f718a2edacdb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.505855] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2094.506026] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2094.506786] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8a53dcc-9788-4d02-8c2c-5778c26eb88f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.511373] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2094.511373] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5209f71e-43c4-5cfc-a086-fb64c0b0b9d4" [ 2094.511373] env[63371]: _type = "Task" [ 2094.511373] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.518558] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5209f71e-43c4-5cfc-a086-fb64c0b0b9d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.021487] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]5209f71e-43c4-5cfc-a086-fb64c0b0b9d4, 'name': SearchDatastore_Task, 'duration_secs': 0.008627} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.022823] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44f6909b-8481-40a9-8ccc-cdcda4a1fc0f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.028054] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2095.028054] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]524f6726-a6c2-8472-2080-34ae7b09ea7a" [ 2095.028054] env[63371]: _type = "Task" [ 2095.028054] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.035268] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524f6726-a6c2-8472-2080-34ae7b09ea7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.537343] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]524f6726-a6c2-8472-2080-34ae7b09ea7a, 'name': SearchDatastore_Task, 'duration_secs': 0.00983} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.537785] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2095.537889] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 96098783-edc0-4ce6-866f-bc17a9961aa5/96098783-edc0-4ce6-866f-bc17a9961aa5.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2095.538093] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd23cfd6-d68c-4090-abbe-83002c2ee211 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.544447] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2095.544447] env[63371]: value = "task-1775224" [ 2095.544447] env[63371]: _type = "Task" [ 2095.544447] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.551349] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775224, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.658150] env[63371]: INFO nova.compute.manager [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Resuming [ 2095.658787] env[63371]: DEBUG nova.objects.instance [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lazy-loading 'flavor' on Instance uuid 8591c7e6-37a5-421f-8627-28a3b022537a {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2096.055132] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775224, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.554757] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775224, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.869903} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.555099] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9/1aeb47a7-4e18-481d-b3c0-d33e8c7839d9.vmdk to [datastore1] 96098783-edc0-4ce6-866f-bc17a9961aa5/96098783-edc0-4ce6-866f-bc17a9961aa5.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2096.555192] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Extending root virtual disk to 1048576 {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2096.555423] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bf35e4dc-6b92-4726-9e3b-dbf67091013c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.561730] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2096.561730] env[63371]: value = "task-1775225" [ 2096.561730] env[63371]: _type = "Task" [ 2096.561730] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.568789] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775225, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.667141] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2096.667340] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquired lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2096.667496] env[63371]: DEBUG nova.network.neutron [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2097.071429] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775225, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058725} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.071695] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Extended root virtual disk {{(pid=63371) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2097.072459] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d9f02b-8517-4147-a8ee-0366058eccbd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.093808] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 96098783-edc0-4ce6-866f-bc17a9961aa5/96098783-edc0-4ce6-866f-bc17a9961aa5.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2097.094083] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88859313-ef50-4a4a-b705-3622351a4b1b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.113009] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2097.113009] env[63371]: value = "task-1775226" [ 2097.113009] env[63371]: _type = "Task" [ 2097.113009] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.120483] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775226, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.371103] env[63371]: DEBUG nova.network.neutron [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Updating instance_info_cache with network_info: [{"id": "fd85bc34-1186-464a-b7f2-5c62353373fb", "address": "fa:16:3e:12:88:f0", "network": {"id": "2c20b3ed-8db5-4efc-bce8-d6d67668b460", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-1061312671-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.191", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f98ab0107f5040139ef8be7c3ae22207", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dbd2870d-a51d-472a-8034-1b3e132b5cb6", "external-id": "nsx-vlan-transportzone-101", "segmentation_id": 101, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd85bc34-11", "ovs_interfaceid": "fd85bc34-1186-464a-b7f2-5c62353373fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2097.622492] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775226, 'name': ReconfigVM_Task, 'duration_secs': 0.283037} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.622838] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 96098783-edc0-4ce6-866f-bc17a9961aa5/96098783-edc0-4ce6-866f-bc17a9961aa5.vmdk or device None with type sparse {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2097.623415] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-726e4924-e9ff-4cf6-916b-e1a90dda55ca {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.630645] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2097.630645] env[63371]: value = "task-1775227" [ 2097.630645] env[63371]: _type = "Task" [ 2097.630645] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.638012] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775227, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.873732] env[63371]: DEBUG oslo_concurrency.lockutils [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Releasing lock "refresh_cache-8591c7e6-37a5-421f-8627-28a3b022537a" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2097.874660] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7aed3b5-2112-40ec-ad70-04b6268e28e4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.881148] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Resuming the VM {{(pid=63371) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2097.881361] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12c5a245-906f-4725-8338-70e4c21dbb59 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.887116] env[63371]: DEBUG oslo_vmware.api [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2097.887116] env[63371]: value = "task-1775228" [ 2097.887116] env[63371]: _type = "Task" [ 2097.887116] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.893932] env[63371]: DEBUG oslo_vmware.api [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775228, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.140671] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775227, 'name': Rename_Task, 'duration_secs': 0.146931} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.140919] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2098.141216] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-33e97690-c3e5-49c7-97aa-2b22b3565c6a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.147897] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2098.147897] env[63371]: value = "task-1775229" [ 2098.147897] env[63371]: _type = "Task" [ 2098.147897] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2098.155481] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775229, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.398177] env[63371]: DEBUG oslo_vmware.api [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775228, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.658599] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775229, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.898095] env[63371]: DEBUG oslo_vmware.api [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775228, 'name': PowerOnVM_Task, 'duration_secs': 0.53566} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.898385] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Resumed the VM {{(pid=63371) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2098.898576] env[63371]: DEBUG nova.compute.manager [None req-4816453f-1cbb-469a-9990-3c2843ca2d48 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2098.899379] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394486e2-5336-412e-be31-0f5b1cc86446 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.159200] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775229, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.659889] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775229, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.159867] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775229, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.350736] env[63371]: DEBUG oslo_concurrency.lockutils [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "8591c7e6-37a5-421f-8627-28a3b022537a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2100.351046] env[63371]: DEBUG oslo_concurrency.lockutils [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "8591c7e6-37a5-421f-8627-28a3b022537a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2100.351283] env[63371]: DEBUG oslo_concurrency.lockutils [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "8591c7e6-37a5-421f-8627-28a3b022537a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2100.351475] env[63371]: DEBUG oslo_concurrency.lockutils [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "8591c7e6-37a5-421f-8627-28a3b022537a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2100.351645] env[63371]: DEBUG oslo_concurrency.lockutils [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "8591c7e6-37a5-421f-8627-28a3b022537a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2100.353754] env[63371]: INFO nova.compute.manager [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Terminating instance [ 2100.355501] env[63371]: DEBUG nova.compute.manager [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Start destroying the instance on the hypervisor. 
{{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2100.355747] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2100.356583] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef42727-17f6-4c01-ba85-fb73bcee78a7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.363949] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2100.364460] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-75181f0a-74f2-49a0-a566-26266c8fa2ac {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.369758] env[63371]: DEBUG oslo_vmware.api [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2100.369758] env[63371]: value = "task-1775230" [ 2100.369758] env[63371]: _type = "Task" [ 2100.369758] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.377362] env[63371]: DEBUG oslo_vmware.api [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775230, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.660563] env[63371]: DEBUG oslo_vmware.api [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775229, 'name': PowerOnVM_Task, 'duration_secs': 2.368867} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.660921] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2100.661042] env[63371]: INFO nova.compute.manager [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Took 10.86 seconds to spawn the instance on the hypervisor. 
[ 2100.661239] env[63371]: DEBUG nova.compute.manager [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2100.661998] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b4d077-e77c-48d4-80ef-661f3a703cf0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.880026] env[63371]: DEBUG oslo_vmware.api [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775230, 'name': PowerOffVM_Task, 'duration_secs': 0.283036} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.880263] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2100.880439] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2100.880676] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad6070c3-edb2-44fb-b9cc-bd08fa388f6e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.956059] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2100.956308] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2100.956492] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleting the datastore file [datastore1] 8591c7e6-37a5-421f-8627-28a3b022537a {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2100.956776] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf4a831b-d9aa-4e00-8a2c-ca4eecb685a5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.962880] env[63371]: DEBUG oslo_vmware.api [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 
tempest-ServerActionsTestJSON-577892058-project-member] Waiting for the task: (returnval){ [ 2100.962880] env[63371]: value = "task-1775232" [ 2100.962880] env[63371]: _type = "Task" [ 2100.962880] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.970374] env[63371]: DEBUG oslo_vmware.api [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775232, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.179842] env[63371]: INFO nova.compute.manager [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Took 15.54 seconds to build instance. [ 2101.472858] env[63371]: DEBUG oslo_vmware.api [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Task: {'id': task-1775232, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125399} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.473134] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2101.473281] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2101.473407] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2101.473608] env[63371]: INFO nova.compute.manager [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 2101.473879] env[63371]: DEBUG oslo.service.loopingcall [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2101.474100] env[63371]: DEBUG nova.compute.manager [-] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2101.474201] env[63371]: DEBUG nova.network.neutron [-] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2101.682249] env[63371]: DEBUG oslo_concurrency.lockutils [None req-8545dee9-8f6c-403f-8ca4-b413a7f75371 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.051s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.933812] env[63371]: DEBUG nova.compute.manager [req-1a7af00f-800f-4824-bf34-689da970b4cd req-43cc3ae7-0776-486c-8072-19dec2cb3ea8 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Received event network-vif-deleted-fd85bc34-1186-464a-b7f2-5c62353373fb {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2101.933940] env[63371]: INFO nova.compute.manager [req-1a7af00f-800f-4824-bf34-689da970b4cd req-43cc3ae7-0776-486c-8072-19dec2cb3ea8 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Neutron deleted interface fd85bc34-1186-464a-b7f2-5c62353373fb; detaching it from the instance and deleting it from the info cache [ 2101.934135] env[63371]: DEBUG nova.network.neutron [req-1a7af00f-800f-4824-bf34-689da970b4cd req-43cc3ae7-0776-486c-8072-19dec2cb3ea8 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2102.129940] env[63371]: DEBUG nova.compute.manager [req-a7ad332e-dd2f-4831-80b3-9aab90ce13df req-38c205d9-0586-4641-a4b5-c08f5d7b920a service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Received event network-changed-487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2102.130168] env[63371]: DEBUG nova.compute.manager [req-a7ad332e-dd2f-4831-80b3-9aab90ce13df req-38c205d9-0586-4641-a4b5-c08f5d7b920a service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Refreshing instance network info cache due to event network-changed-487877ba-9a41-4afc-81e7-07dfbf50f256. 
{{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2102.130378] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7ad332e-dd2f-4831-80b3-9aab90ce13df req-38c205d9-0586-4641-a4b5-c08f5d7b920a service nova] Acquiring lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2102.130774] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7ad332e-dd2f-4831-80b3-9aab90ce13df req-38c205d9-0586-4641-a4b5-c08f5d7b920a service nova] Acquired lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2102.130774] env[63371]: DEBUG nova.network.neutron [req-a7ad332e-dd2f-4831-80b3-9aab90ce13df req-38c205d9-0586-4641-a4b5-c08f5d7b920a service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Refreshing network info cache for port 487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2102.404941] env[63371]: DEBUG nova.network.neutron [-] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2102.438277] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c68c7c7e-1516-4af3-91b3-cc063baa9b74 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.447910] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e4b024-c747-47fb-9739-ee8db96b8004 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.471449] env[63371]: DEBUG nova.compute.manager [req-1a7af00f-800f-4824-bf34-689da970b4cd req-43cc3ae7-0776-486c-8072-19dec2cb3ea8 service nova] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Detach interface failed, port_id=fd85bc34-1186-464a-b7f2-5c62353373fb, reason: Instance 8591c7e6-37a5-421f-8627-28a3b022537a could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2102.839432] env[63371]: DEBUG nova.network.neutron [req-a7ad332e-dd2f-4831-80b3-9aab90ce13df req-38c205d9-0586-4641-a4b5-c08f5d7b920a service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updated VIF entry in instance network info cache for port 487877ba-9a41-4afc-81e7-07dfbf50f256. 
{{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2102.839802] env[63371]: DEBUG nova.network.neutron [req-a7ad332e-dd2f-4831-80b3-9aab90ce13df req-38c205d9-0586-4641-a4b5-c08f5d7b920a service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updating instance_info_cache with network_info: [{"id": "487877ba-9a41-4afc-81e7-07dfbf50f256", "address": "fa:16:3e:f0:41:09", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap487877ba-9a", "ovs_interfaceid": "487877ba-9a41-4afc-81e7-07dfbf50f256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2102.910069] env[63371]: INFO nova.compute.manager [-] [instance: 8591c7e6-37a5-421f-8627-28a3b022537a] Took 1.44 seconds to deallocate network for instance. 
[ 2103.342803] env[63371]: DEBUG oslo_concurrency.lockutils [req-a7ad332e-dd2f-4831-80b3-9aab90ce13df req-38c205d9-0586-4641-a4b5-c08f5d7b920a service nova] Releasing lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2103.416526] env[63371]: DEBUG oslo_concurrency.lockutils [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2103.416844] env[63371]: DEBUG oslo_concurrency.lockutils [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.417094] env[63371]: DEBUG nova.objects.instance [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lazy-loading 'resources' on Instance uuid 8591c7e6-37a5-421f-8627-28a3b022537a {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2103.961129] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fd54aa-3cf9-4ea9-8f4e-645b8f9f2aaf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.968659] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ecd2e5-1f12-41b7-945a-f1482920cc65 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2103.999624] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed127a5-69af-41f2-bf80-4a9110f4f338 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.006879] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a218527f-4258-489f-8bb0-c77e6305a6c9 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.019696] env[63371]: DEBUG nova.compute.provider_tree [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2104.522719] env[63371]: DEBUG nova.scheduler.client.report [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2105.027422] env[63371]: DEBUG oslo_concurrency.lockutils [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.610s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2105.045085] env[63371]: INFO nova.scheduler.client.report [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Deleted allocations for instance 8591c7e6-37a5-421f-8627-28a3b022537a [ 2105.552903] env[63371]: DEBUG oslo_concurrency.lockutils [None req-408d307c-f0f5-4432-a04a-5c12aaab1219 tempest-ServerActionsTestJSON-577892058 tempest-ServerActionsTestJSON-577892058-project-member] Lock "8591c7e6-37a5-421f-8627-28a3b022537a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.202s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2140.717360] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "96098783-edc0-4ce6-866f-bc17a9961aa5" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2140.717736] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2140.717836] env[63371]: INFO nova.compute.manager [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Shelving [ 2141.225153] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2141.225429] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e54285b8-3f2b-4da7-8879-1538a7f0804d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.232072] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2141.232072] env[63371]: value = "task-1775233" [ 2141.232072] env[63371]: _type = "Task" [ 2141.232072] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2141.239756] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775233, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2141.430304] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.430553] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2141.430703] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2141.742450] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775233, 'name': PowerOffVM_Task, 'duration_secs': 0.175825} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2141.742790] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2141.743519] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3854394b-d00f-4088-ae36-584b1aed007a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.761117] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ed5a3b-fabe-4df2-ae23-f7554dcc13bf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.270763] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Creating Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2142.271453] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7bb467d3-2e10-4dbb-8c94-42f3b45eec5a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.279213] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2142.279213] 
env[63371]: value = "task-1775234" [ 2142.279213] env[63371]: _type = "Task" [ 2142.279213] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2142.287509] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775234, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2142.430885] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2142.431150] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2142.789628] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775234, 'name': CreateSnapshot_Task, 'duration_secs': 0.414073} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2142.789628] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Created Snapshot of the VM instance {{(pid=63371) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2142.790375] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a1e8b7c-4cf2-4e18-92c6-ff93f9d13b61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.934281] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2142.934465] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2142.934627] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.934780] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) 
update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2142.935662] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34650b50-6198-4997-b6b3-20ba81c7a675 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.943656] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc3f6e3-2f2b-45d7-9f46-a4a44343d502 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.958354] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb77236-5bcf-45a7-a692-527a5daf6190 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.964583] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7b2a62-7b9a-4660-a729-a735be1c6cf7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.994337] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181194MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2142.994461] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2142.994645] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2143.308292] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Creating linked-clone VM from snapshot {{(pid=63371) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2143.308598] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-f4e2a145-7491-455b-b4c1-fac0896ddadf {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.316906] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2143.316906] env[63371]: value = "task-1775235" [ 2143.316906] env[63371]: _type = "Task" [ 2143.316906] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.324372] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775235, 'name': CloneVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.828116] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775235, 'name': CloneVM_Task} progress is 94%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.018769] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 96098783-edc0-4ce6-866f-bc17a9961aa5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2144.019027] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2144.019247] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2144.049249] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-327a5cef-7195-4a9f-8b3f-4a7ee050d612 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.057097] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2904ea-2eb9-4913-b71a-f336fe56a770 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.087783] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286a1a54-ffb5-409d-80f2-d97dbaa9fa1c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.094990] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03c649e-28fc-424b-bf0d-d86ad22c8d01 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.108047] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2144.328358] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775235, 'name': CloneVM_Task} 
progress is 95%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.610795] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2144.827961] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775235, 'name': CloneVM_Task, 'duration_secs': 1.111607} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.828249] env[63371]: INFO nova.virt.vmwareapi.vmops [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Created linked-clone VM from snapshot [ 2144.828974] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6275b1-6497-401e-97e7-7401a63d2612 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.835769] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Uploading image cd9c8a30-7a6c-4610-b4e9-48769232d714 {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2144.856224] env[63371]: DEBUG oslo_vmware.rw_handles [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2144.856224] env[63371]: value = "vm-368533" [ 2144.856224] env[63371]: _type = "VirtualMachine" [ 2144.856224] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2144.856455] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a642d69e-8ca5-442c-818c-2399ff755d79 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.863126] env[63371]: DEBUG oslo_vmware.rw_handles [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lease: (returnval){ [ 2144.863126] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520163f2-098f-c1b4-5577-c2889346b8e5" [ 2144.863126] env[63371]: _type = "HttpNfcLease" [ 2144.863126] env[63371]: } obtained for exporting VM: (result){ [ 2144.863126] env[63371]: value = "vm-368533" [ 2144.863126] env[63371]: _type = "VirtualMachine" [ 2144.863126] env[63371]: }. 
{{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2144.863331] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the lease: (returnval){ [ 2144.863331] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520163f2-098f-c1b4-5577-c2889346b8e5" [ 2144.863331] env[63371]: _type = "HttpNfcLease" [ 2144.863331] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2144.868971] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2144.868971] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520163f2-098f-c1b4-5577-c2889346b8e5" [ 2144.868971] env[63371]: _type = "HttpNfcLease" [ 2144.868971] env[63371]: } is initializing. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2145.115547] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2145.115739] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.121s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2145.371498] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2145.371498] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520163f2-098f-c1b4-5577-c2889346b8e5" [ 2145.371498] env[63371]: _type = "HttpNfcLease" [ 2145.371498] env[63371]: } is ready. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2145.371773] env[63371]: DEBUG oslo_vmware.rw_handles [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2145.371773] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]520163f2-098f-c1b4-5577-c2889346b8e5" [ 2145.371773] env[63371]: _type = "HttpNfcLease" [ 2145.371773] env[63371]: }. {{(pid=63371) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2145.372503] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150c7cfb-ae51-4043-b280-4a0dd9dc0f83 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.379863] env[63371]: DEBUG oslo_vmware.rw_handles [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e4be19-6733-2269-2891-5880915ada77/disk-0.vmdk from lease info. 
{{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2145.380053] env[63371]: DEBUG oslo_vmware.rw_handles [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e4be19-6733-2269-2891-5880915ada77/disk-0.vmdk for reading. {{(pid=63371) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2145.465576] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-65dbb170-a4d3-4d38-85cc-4bee5e32fa8a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.116067] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2147.116067] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2147.116067] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2147.116067] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2147.619943] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2147.620168] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquired lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2147.620302] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Forcefully refreshing network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2147.620463] env[63371]: DEBUG nova.objects.instance [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lazy-loading 'info_cache' on Instance uuid 96098783-edc0-4ce6-866f-bc17a9961aa5 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2149.384972] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updating instance_info_cache with network_info: [{"id": "487877ba-9a41-4afc-81e7-07dfbf50f256", "address": "fa:16:3e:f0:41:09", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap487877ba-9a", "ovs_interfaceid": "487877ba-9a41-4afc-81e7-07dfbf50f256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2149.887782] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Releasing lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2149.887978] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updated the network info_cache for instance {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2149.888211] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2149.888373] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2149.888514] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2152.500539] env[63371]: DEBUG oslo_vmware.rw_handles [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e4be19-6733-2269-2891-5880915ada77/disk-0.vmdk. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2152.501578] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4849f7bc-e1b9-4e6f-9499-1807d284af35 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.507610] env[63371]: DEBUG oslo_vmware.rw_handles [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e4be19-6733-2269-2891-5880915ada77/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2152.507774] env[63371]: ERROR oslo_vmware.rw_handles [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e4be19-6733-2269-2891-5880915ada77/disk-0.vmdk due to incomplete transfer. [ 2152.507984] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ca309f57-674d-40a3-a9c9-35535cbd53ae {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.514659] env[63371]: DEBUG oslo_vmware.rw_handles [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e4be19-6733-2269-2891-5880915ada77/disk-0.vmdk. {{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2152.514847] env[63371]: DEBUG nova.virt.vmwareapi.images [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Uploaded image cd9c8a30-7a6c-4610-b4e9-48769232d714 to the Glance image server {{(pid=63371) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2152.517045] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Destroying the VM {{(pid=63371) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2152.517263] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ccdf2c8e-5740-41a4-bb66-a84f47247e61 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.522667] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2152.522667] env[63371]: value = "task-1775237" [ 2152.522667] env[63371]: _type = "Task" [ 2152.522667] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2152.530525] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775237, 'name': Destroy_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.032272] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775237, 'name': Destroy_Task, 'duration_secs': 0.370145} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2153.032569] env[63371]: INFO nova.virt.vmwareapi.vm_util [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Destroyed the VM [ 2153.032806] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Deleting Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2153.033070] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8da88467-5ac1-4513-ac3a-ade0cb352ac0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.038730] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2153.038730] env[63371]: value = "task-1775238" [ 2153.038730] env[63371]: _type = "Task" [ 2153.038730] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2153.045863] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775238, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2153.548383] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775238, 'name': RemoveSnapshot_Task, 'duration_secs': 0.403776} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2153.548751] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Deleted Snapshot of the VM instance {{(pid=63371) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2153.548894] env[63371]: DEBUG nova.compute.manager [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2153.549633] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd6727d-cb75-45df-ad50-80865fac42bb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.060806] env[63371]: INFO nova.compute.manager [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Shelve offloading [ 2154.062491] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2154.062778] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11e4000c-3e83-4be8-8aaa-f4e69769484e {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.069310] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2154.069310] env[63371]: value = "task-1775239" [ 2154.069310] env[63371]: _type = "Task" [ 2154.069310] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2154.080116] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] VM already powered off {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2154.080298] env[63371]: DEBUG nova.compute.manager [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2154.080992] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ac79e50-d4e5-4588-bb23-6d64d0804170 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2154.086092] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2154.086280] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2154.086470] env[63371]: DEBUG nova.network.neutron [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2154.781762] env[63371]: DEBUG nova.network.neutron [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updating instance_info_cache with network_info: [{"id": "487877ba-9a41-4afc-81e7-07dfbf50f256", "address": "fa:16:3e:f0:41:09", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap487877ba-9a", "ovs_interfaceid": "487877ba-9a41-4afc-81e7-07dfbf50f256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2155.284610] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2155.485523] env[63371]: DEBUG nova.compute.manager [req-6ee6be41-8109-42fe-a04f-4af023cf3dc8 req-19f9970c-494c-44b3-b092-331c02b3f89d service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Received event network-vif-unplugged-487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2155.485750] env[63371]: DEBUG oslo_concurrency.lockutils [req-6ee6be41-8109-42fe-a04f-4af023cf3dc8 req-19f9970c-494c-44b3-b092-331c02b3f89d service nova] Acquiring lock "96098783-edc0-4ce6-866f-bc17a9961aa5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2155.485958] env[63371]: DEBUG oslo_concurrency.lockutils [req-6ee6be41-8109-42fe-a04f-4af023cf3dc8 req-19f9970c-494c-44b3-b092-331c02b3f89d service nova] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2155.486408] env[63371]: DEBUG oslo_concurrency.lockutils [req-6ee6be41-8109-42fe-a04f-4af023cf3dc8 req-19f9970c-494c-44b3-b092-331c02b3f89d service nova] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2155.486596] env[63371]: DEBUG nova.compute.manager [req-6ee6be41-8109-42fe-a04f-4af023cf3dc8 req-19f9970c-494c-44b3-b092-331c02b3f89d service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] No waiting events found dispatching network-vif-unplugged-487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2155.486767] env[63371]: WARNING nova.compute.manager [req-6ee6be41-8109-42fe-a04f-4af023cf3dc8 req-19f9970c-494c-44b3-b092-331c02b3f89d service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Received unexpected event network-vif-unplugged-487877ba-9a41-4afc-81e7-07dfbf50f256 for instance with vm_state shelved and task_state shelving_offloading. 
[ 2155.578286] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2155.579201] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57292148-c114-4752-a9af-3bbae722f513 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.586463] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2155.586688] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-efedbb5f-7f64-43df-9ca0-237948efab46 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.661805] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2155.662061] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2155.662248] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleting the datastore file [datastore1] 96098783-edc0-4ce6-866f-bc17a9961aa5 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2155.662511] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57f66089-9087-42c4-9fed-04faf2afacc0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.668826] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2155.668826] env[63371]: value = "task-1775241" [ 2155.668826] env[63371]: _type = "Task" [ 2155.668826] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2155.676012] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775241, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2156.179500] env[63371]: DEBUG oslo_vmware.api [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120052} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2156.179834] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2156.179997] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2156.180214] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2156.202483] env[63371]: INFO nova.scheduler.client.report [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleted allocations for instance 96098783-edc0-4ce6-866f-bc17a9961aa5 [ 2156.706909] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2156.707217] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2156.707451] env[63371]: DEBUG nova.objects.instance [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lazy-loading 'resources' on Instance uuid 96098783-edc0-4ce6-866f-bc17a9961aa5 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2157.210220] env[63371]: DEBUG nova.objects.instance [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lazy-loading 'numa_topology' on Instance uuid 96098783-edc0-4ce6-866f-bc17a9961aa5 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2157.509463] env[63371]: DEBUG nova.compute.manager 
[req-ca2653d5-8b80-4139-b2c4-7be68dd9dc7c req-100b7ee0-81d6-4af1-81f9-937168a0e990 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Received event network-changed-487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2157.509463] env[63371]: DEBUG nova.compute.manager [req-ca2653d5-8b80-4139-b2c4-7be68dd9dc7c req-100b7ee0-81d6-4af1-81f9-937168a0e990 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Refreshing instance network info cache due to event network-changed-487877ba-9a41-4afc-81e7-07dfbf50f256. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2157.509577] env[63371]: DEBUG oslo_concurrency.lockutils [req-ca2653d5-8b80-4139-b2c4-7be68dd9dc7c req-100b7ee0-81d6-4af1-81f9-937168a0e990 service nova] Acquiring lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2157.509691] env[63371]: DEBUG oslo_concurrency.lockutils [req-ca2653d5-8b80-4139-b2c4-7be68dd9dc7c req-100b7ee0-81d6-4af1-81f9-937168a0e990 service nova] Acquired lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2157.509851] env[63371]: DEBUG nova.network.neutron [req-ca2653d5-8b80-4139-b2c4-7be68dd9dc7c req-100b7ee0-81d6-4af1-81f9-937168a0e990 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Refreshing network info cache for port 487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2157.712499] env[63371]: DEBUG nova.objects.base [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Object Instance<96098783-edc0-4ce6-866f-bc17a9961aa5> lazy-loaded attributes: resources,numa_topology {{(pid=63371) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2157.726946] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b51e73-6123-4ad8-a1c5-a292034e31fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.734405] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4aba0be-afda-4c5d-90ba-b8d4d954e6e0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.763551] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a016d8a-94bc-4897-9390-f913b34c5907 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.770724] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cc41b2-26cf-4e00-a87e-12e198a741a0 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.784840] env[63371]: DEBUG nova.compute.provider_tree [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2158.276027] env[63371]: DEBUG nova.network.neutron [req-ca2653d5-8b80-4139-b2c4-7be68dd9dc7c req-100b7ee0-81d6-4af1-81f9-937168a0e990 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updated VIF entry in instance network info cache for port 487877ba-9a41-4afc-81e7-07dfbf50f256. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2158.276366] env[63371]: DEBUG nova.network.neutron [req-ca2653d5-8b80-4139-b2c4-7be68dd9dc7c req-100b7ee0-81d6-4af1-81f9-937168a0e990 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updating instance_info_cache with network_info: [{"id": "487877ba-9a41-4afc-81e7-07dfbf50f256", "address": "fa:16:3e:f0:41:09", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap487877ba-9a", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2158.288263] env[63371]: DEBUG nova.scheduler.client.report [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2158.778641] env[63371]: DEBUG oslo_concurrency.lockutils [req-ca2653d5-8b80-4139-b2c4-7be68dd9dc7c req-100b7ee0-81d6-4af1-81f9-937168a0e990 service nova] Releasing lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2158.792644] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.085s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.117992] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring 
lock "96098783-edc0-4ce6-866f-bc17a9961aa5" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2159.299908] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e993ed2f-2951-41c2-a181-1322b669a051 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 18.582s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2159.300927] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.183s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.300927] env[63371]: INFO nova.compute.manager [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Unshelving [ 2160.324500] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2160.324758] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2160.324955] env[63371]: DEBUG nova.objects.instance [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lazy-loading 'pci_requests' on Instance uuid 96098783-edc0-4ce6-866f-bc17a9961aa5 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2160.829348] env[63371]: DEBUG nova.objects.instance [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lazy-loading 'numa_topology' on Instance uuid 96098783-edc0-4ce6-866f-bc17a9961aa5 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2161.332433] env[63371]: INFO nova.compute.claims [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2162.366301] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df4cb5ad-7c73-446c-b14b-3f4b1d574300 {{(pid=63371) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.373758] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d5d276-cac3-4672-8561-1957c848acc3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.403800] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e54cd65-23e3-4567-a83d-0596247245fb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.410572] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a845d4-86fe-44dd-a6bf-467071710128 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.422937] env[63371]: DEBUG nova.compute.provider_tree [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2162.925903] env[63371]: DEBUG nova.scheduler.client.report [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2163.430489] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.106s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2163.458496] env[63371]: INFO nova.network.neutron [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updating port 487877ba-9a41-4afc-81e7-07dfbf50f256 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2164.894910] env[63371]: DEBUG nova.compute.manager [req-d24fd6ce-0a74-42d9-bcc1-a1c4fcd23f41 req-66ca3168-fcc8-46d1-8727-c74306b60fb2 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Received event network-vif-plugged-487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2164.895155] env[63371]: DEBUG oslo_concurrency.lockutils [req-d24fd6ce-0a74-42d9-bcc1-a1c4fcd23f41 req-66ca3168-fcc8-46d1-8727-c74306b60fb2 service nova] Acquiring lock "96098783-edc0-4ce6-866f-bc17a9961aa5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63371) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2164.895360] env[63371]: DEBUG oslo_concurrency.lockutils [req-d24fd6ce-0a74-42d9-bcc1-a1c4fcd23f41 req-66ca3168-fcc8-46d1-8727-c74306b60fb2 service nova] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2164.895460] env[63371]: DEBUG oslo_concurrency.lockutils [req-d24fd6ce-0a74-42d9-bcc1-a1c4fcd23f41 req-66ca3168-fcc8-46d1-8727-c74306b60fb2 service nova] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2164.895627] env[63371]: DEBUG nova.compute.manager [req-d24fd6ce-0a74-42d9-bcc1-a1c4fcd23f41 req-66ca3168-fcc8-46d1-8727-c74306b60fb2 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] No waiting events found dispatching network-vif-plugged-487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2164.895786] env[63371]: WARNING nova.compute.manager [req-d24fd6ce-0a74-42d9-bcc1-a1c4fcd23f41 req-66ca3168-fcc8-46d1-8727-c74306b60fb2 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Received unexpected event network-vif-plugged-487877ba-9a41-4afc-81e7-07dfbf50f256 for instance with vm_state shelved_offloaded and task_state spawning. [ 2164.979726] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2164.979920] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2164.980105] env[63371]: DEBUG nova.network.neutron [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Building network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2165.673835] env[63371]: DEBUG nova.network.neutron [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updating instance_info_cache with network_info: [{"id": "487877ba-9a41-4afc-81e7-07dfbf50f256", "address": "fa:16:3e:f0:41:09", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap487877ba-9a", "ovs_interfaceid": "487877ba-9a41-4afc-81e7-07dfbf50f256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2166.177112] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2166.205120] env[63371]: DEBUG nova.virt.hardware [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-11T21:16:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='180f7e27ff5229709ff1dabf659830fe',container_format='bare',created_at=2024-12-11T21:44:57Z,direct_url=,disk_format='vmdk',id=cd9c8a30-7a6c-4610-b4e9-48769232d714,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1878842761-shelved',owner='da713632f95146f1986c0d8a9e529ca0',properties=ImageMetaProps,protected=,size=31667200,status='active',tags=,updated_at=2024-12-11T21:45:09Z,virtual_size=,visibility=), allow threads: False {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2166.205120] env[63371]: DEBUG nova.virt.hardware [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Flavor limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2166.205120] env[63371]: DEBUG nova.virt.hardware [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Image limits 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2166.205120] env[63371]: DEBUG nova.virt.hardware [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Flavor pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2166.205120] env[63371]: DEBUG nova.virt.hardware [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 
tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Image pref 0:0:0 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2166.205402] env[63371]: DEBUG nova.virt.hardware [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63371) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2166.205402] env[63371]: DEBUG nova.virt.hardware [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2166.205503] env[63371]: DEBUG nova.virt.hardware [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2166.205661] env[63371]: DEBUG nova.virt.hardware [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Got 1 possible topologies {{(pid=63371) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2166.205823] env[63371]: DEBUG nova.virt.hardware [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2166.205993] env[63371]: DEBUG nova.virt.hardware [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63371) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2166.206903] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd44509-1193-4e83-936c-c6b57033d7d1 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.216183] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b7ccdc-5707-43dc-8d09-28faf0299721 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.228880] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:41:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '487877ba-9a41-4afc-81e7-07dfbf50f256', 'vif_model': 'vmxnet3'}] {{(pid=63371) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2166.236045] env[63371]: DEBUG oslo.service.loopingcall [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2166.236264] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Creating VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2166.236482] env[63371]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15904741-3652-42d9-b1bc-252900370ee7 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.254693] env[63371]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2166.254693] env[63371]: value = "task-1775242" [ 2166.254693] env[63371]: _type = "Task" [ 2166.254693] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.261777] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775242, 'name': CreateVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.764572] env[63371]: DEBUG oslo_vmware.api [-] Task: {'id': task-1775242, 'name': CreateVM_Task, 'duration_secs': 0.335961} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2166.764785] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Created VM on the ESX host {{(pid=63371) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2166.765359] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd9c8a30-7a6c-4610-b4e9-48769232d714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2166.765544] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd9c8a30-7a6c-4610-b4e9-48769232d714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2166.765940] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/cd9c8a30-7a6c-4610-b4e9-48769232d714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2166.766203] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e40f419-ca52-45f5-a52a-431270365ad5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.770504] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 
tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2166.770504] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9d778-9645-660a-ca8a-18e8ced1ef30" [ 2166.770504] env[63371]: _type = "Task" [ 2166.770504] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.777669] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52f9d778-9645-660a-ca8a-18e8ced1ef30, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2166.919133] env[63371]: DEBUG nova.compute.manager [req-6cfd4805-c3b4-4e87-9db7-ae7934a404ec req-4bcaac22-a89b-49c6-b122-a76901ff2f54 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Received event network-changed-487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2166.919324] env[63371]: DEBUG nova.compute.manager [req-6cfd4805-c3b4-4e87-9db7-ae7934a404ec req-4bcaac22-a89b-49c6-b122-a76901ff2f54 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Refreshing instance network info cache due to event network-changed-487877ba-9a41-4afc-81e7-07dfbf50f256. {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11137}} [ 2166.919539] env[63371]: DEBUG oslo_concurrency.lockutils [req-6cfd4805-c3b4-4e87-9db7-ae7934a404ec req-4bcaac22-a89b-49c6-b122-a76901ff2f54 service nova] Acquiring lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2166.919682] env[63371]: DEBUG oslo_concurrency.lockutils [req-6cfd4805-c3b4-4e87-9db7-ae7934a404ec req-4bcaac22-a89b-49c6-b122-a76901ff2f54 service nova] Acquired lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2166.919840] env[63371]: DEBUG nova.network.neutron [req-6cfd4805-c3b4-4e87-9db7-ae7934a404ec req-4bcaac22-a89b-49c6-b122-a76901ff2f54 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Refreshing network info cache for port 487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2167.280110] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd9c8a30-7a6c-4610-b4e9-48769232d714" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2167.280444] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Processing image cd9c8a30-7a6c-4610-b4e9-48769232d714 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2167.280561] env[63371]: DEBUG oslo_concurrency.lockutils [None 
req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/cd9c8a30-7a6c-4610-b4e9-48769232d714/cd9c8a30-7a6c-4610-b4e9-48769232d714.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2167.280665] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquired lock "[datastore1] devstack-image-cache_base/cd9c8a30-7a6c-4610-b4e9-48769232d714/cd9c8a30-7a6c-4610-b4e9-48769232d714.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2167.280842] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2167.281115] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26b7ac0c-84a4-4011-a9f3-fb362105352f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.289942] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2167.290085] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63371) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2167.290737] env[63371]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daad1e82-ff1f-41ef-b8aa-7eebab21f8cd {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.295162] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2167.295162] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]52118529-f1dc-b3fb-abd9-d9bed4edfc31" [ 2167.295162] env[63371]: _type = "Task" [ 2167.295162] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.302111] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': session[52854284-8312-6a88-0b15-8c5a2a120aab]52118529-f1dc-b3fb-abd9-d9bed4edfc31, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.800231] env[63371]: DEBUG nova.network.neutron [req-6cfd4805-c3b4-4e87-9db7-ae7934a404ec req-4bcaac22-a89b-49c6-b122-a76901ff2f54 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updated VIF entry in instance network info cache for port 487877ba-9a41-4afc-81e7-07dfbf50f256. {{(pid=63371) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2167.800567] env[63371]: DEBUG nova.network.neutron [req-6cfd4805-c3b4-4e87-9db7-ae7934a404ec req-4bcaac22-a89b-49c6-b122-a76901ff2f54 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updating instance_info_cache with network_info: [{"id": "487877ba-9a41-4afc-81e7-07dfbf50f256", "address": "fa:16:3e:f0:41:09", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap487877ba-9a", "ovs_interfaceid": "487877ba-9a41-4afc-81e7-07dfbf50f256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2167.807643] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Preparing fetch location {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2167.807888] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Fetch image to [datastore1] OSTACK_IMG_17444f76-4bfd-45a9-b3d3-f921e0003821/OSTACK_IMG_17444f76-4bfd-45a9-b3d3-f921e0003821.vmdk {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2167.808079] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Downloading stream optimized image cd9c8a30-7a6c-4610-b4e9-48769232d714 to [datastore1] OSTACK_IMG_17444f76-4bfd-45a9-b3d3-f921e0003821/OSTACK_IMG_17444f76-4bfd-45a9-b3d3-f921e0003821.vmdk on the data store datastore1 as vApp {{(pid=63371) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2167.808250] env[63371]: DEBUG 
nova.virt.vmwareapi.images [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Downloading image file data cd9c8a30-7a6c-4610-b4e9-48769232d714 to the ESX as VM named 'OSTACK_IMG_17444f76-4bfd-45a9-b3d3-f921e0003821' {{(pid=63371) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2167.872991] env[63371]: DEBUG oslo_vmware.rw_handles [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2167.872991] env[63371]: value = "resgroup-9" [ 2167.872991] env[63371]: _type = "ResourcePool" [ 2167.872991] env[63371]: }. {{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2167.873287] env[63371]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-dded729b-71cb-4b5e-91e8-652bf7a039b5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.893966] env[63371]: DEBUG oslo_vmware.rw_handles [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lease: (returnval){ [ 2167.893966] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5258b031-8c03-bb75-71ed-52ae59a97da6" [ 2167.893966] env[63371]: _type = "HttpNfcLease" [ 2167.893966] env[63371]: } obtained for vApp import into resource pool (val){ [ 2167.893966] env[63371]: value = "resgroup-9" [ 2167.893966] env[63371]: _type = "ResourcePool" [ 2167.893966] env[63371]: }. {{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2167.894269] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the lease: (returnval){ [ 2167.894269] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5258b031-8c03-bb75-71ed-52ae59a97da6" [ 2167.894269] env[63371]: _type = "HttpNfcLease" [ 2167.894269] env[63371]: } to be ready. {{(pid=63371) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2167.901939] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2167.901939] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5258b031-8c03-bb75-71ed-52ae59a97da6" [ 2167.901939] env[63371]: _type = "HttpNfcLease" [ 2167.901939] env[63371]: } is initializing. {{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2168.304081] env[63371]: DEBUG oslo_concurrency.lockutils [req-6cfd4805-c3b4-4e87-9db7-ae7934a404ec req-4bcaac22-a89b-49c6-b122-a76901ff2f54 service nova] Releasing lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2168.401819] env[63371]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2168.401819] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5258b031-8c03-bb75-71ed-52ae59a97da6" [ 2168.401819] env[63371]: _type = "HttpNfcLease" [ 2168.401819] env[63371]: } is ready. 
{{(pid=63371) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2168.402107] env[63371]: DEBUG oslo_vmware.rw_handles [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2168.402107] env[63371]: value = "session[52854284-8312-6a88-0b15-8c5a2a120aab]5258b031-8c03-bb75-71ed-52ae59a97da6" [ 2168.402107] env[63371]: _type = "HttpNfcLease" [ 2168.402107] env[63371]: }. {{(pid=63371) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2168.402833] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f5143e-eb6e-4c81-811a-b13cdfe86082 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2168.410309] env[63371]: DEBUG oslo_vmware.rw_handles [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b27d16-2d2f-b118-c53e-00335f9aea4b/disk-0.vmdk from lease info. {{(pid=63371) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2168.410486] env[63371]: DEBUG oslo_vmware.rw_handles [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating HTTP connection to write to file with size = 31667200 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b27d16-2d2f-b118-c53e-00335f9aea4b/disk-0.vmdk. {{(pid=63371) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2168.473992] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-c942140b-e04f-4190-81eb-c4de0d472bc6 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.561692] env[63371]: DEBUG oslo_vmware.rw_handles [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Completed reading data from the image iterator. {{(pid=63371) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2169.562144] env[63371]: DEBUG oslo_vmware.rw_handles [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b27d16-2d2f-b118-c53e-00335f9aea4b/disk-0.vmdk. 
{{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2169.562905] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b827865-9cb3-4137-80e4-30f94b50e543 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.569898] env[63371]: DEBUG oslo_vmware.rw_handles [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b27d16-2d2f-b118-c53e-00335f9aea4b/disk-0.vmdk is in state: ready. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2169.570082] env[63371]: DEBUG oslo_vmware.rw_handles [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b27d16-2d2f-b118-c53e-00335f9aea4b/disk-0.vmdk. {{(pid=63371) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2169.570310] env[63371]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-602dd2af-7487-472e-a2fe-b48c72636344 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.821417] env[63371]: DEBUG oslo_vmware.rw_handles [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52b27d16-2d2f-b118-c53e-00335f9aea4b/disk-0.vmdk. 
{{(pid=63371) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2169.821607] env[63371]: INFO nova.virt.vmwareapi.images [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Downloaded image file data cd9c8a30-7a6c-4610-b4e9-48769232d714 [ 2169.822434] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc1fc30-9395-4c26-bbac-d418a3d90d81 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.837309] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85c39db1-91cd-4c5d-abee-6814eaf38350 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.885993] env[63371]: INFO nova.virt.vmwareapi.images [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] The imported VM was unregistered [ 2169.888280] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Caching image {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2169.888505] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Creating directory with path [datastore1] devstack-image-cache_base/cd9c8a30-7a6c-4610-b4e9-48769232d714 {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2169.888751] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-28025113-ef23-4146-95e8-708efbf9e2f5 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.902836] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Created directory with path [datastore1] devstack-image-cache_base/cd9c8a30-7a6c-4610-b4e9-48769232d714 {{(pid=63371) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2169.903023] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_17444f76-4bfd-45a9-b3d3-f921e0003821/OSTACK_IMG_17444f76-4bfd-45a9-b3d3-f921e0003821.vmdk to [datastore1] devstack-image-cache_base/cd9c8a30-7a6c-4610-b4e9-48769232d714/cd9c8a30-7a6c-4610-b4e9-48769232d714.vmdk. 
{{(pid=63371) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2169.903258] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-945cffec-e2ac-4f9e-8a96-d69b37eaf7ad {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2169.909616] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2169.909616] env[63371]: value = "task-1775245" [ 2169.909616] env[63371]: _type = "Task" [ 2169.909616] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2169.917680] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775245, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.419645] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775245, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2170.920571] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775245, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.422180] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775245, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.923058] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775245, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.424612] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775245, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.254176} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.424899] env[63371]: INFO nova.virt.vmwareapi.ds_util [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_17444f76-4bfd-45a9-b3d3-f921e0003821/OSTACK_IMG_17444f76-4bfd-45a9-b3d3-f921e0003821.vmdk to [datastore1] devstack-image-cache_base/cd9c8a30-7a6c-4610-b4e9-48769232d714/cd9c8a30-7a6c-4610-b4e9-48769232d714.vmdk. [ 2172.425098] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Cleaning up location [datastore1] OSTACK_IMG_17444f76-4bfd-45a9-b3d3-f921e0003821 {{(pid=63371) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2172.425263] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_17444f76-4bfd-45a9-b3d3-f921e0003821 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2172.425511] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82dbd522-d70d-4ea8-9ae3-c959271148cb {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.431294] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2172.431294] env[63371]: value = "task-1775246" [ 2172.431294] env[63371]: _type = "Task" [ 2172.431294] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.438282] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775246, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.942583] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775246, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035089} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.943066] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2172.943066] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Releasing lock "[datastore1] devstack-image-cache_base/cd9c8a30-7a6c-4610-b4e9-48769232d714/cd9c8a30-7a6c-4610-b4e9-48769232d714.vmdk" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2172.943208] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/cd9c8a30-7a6c-4610-b4e9-48769232d714/cd9c8a30-7a6c-4610-b4e9-48769232d714.vmdk to [datastore1] 96098783-edc0-4ce6-866f-bc17a9961aa5/96098783-edc0-4ce6-866f-bc17a9961aa5.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2172.943463] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10f8b0f7-cfe4-42eb-aa0b-5bc2ed80b968 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.950565] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2172.950565] env[63371]: value = "task-1775247" [ 2172.950565] env[63371]: _type = "Task" [ 2172.950565] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.959221] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775247, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.461406] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775247, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.962173] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775247, 'name': CopyVirtualDisk_Task} progress is 9%. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.465800] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775247, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.964493] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775247, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.466367] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775247, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.966946] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775247, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.908128} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.967247] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/cd9c8a30-7a6c-4610-b4e9-48769232d714/cd9c8a30-7a6c-4610-b4e9-48769232d714.vmdk to [datastore1] 96098783-edc0-4ce6-866f-bc17a9961aa5/96098783-edc0-4ce6-866f-bc17a9961aa5.vmdk {{(pid=63371) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2175.968109] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed76404f-d94a-4059-bc25-5811cca5a898 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.989585] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 96098783-edc0-4ce6-866f-bc17a9961aa5/96098783-edc0-4ce6-866f-bc17a9961aa5.vmdk or device None with type streamOptimized {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2175.989828] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-44e5fbe5-9898-456f-9935-3731a18ca6b4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.007842] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2176.007842] env[63371]: value = "task-1775248" [ 
2176.007842] env[63371]: _type = "Task" [ 2176.007842] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.014926] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775248, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.517482] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775248, 'name': ReconfigVM_Task, 'duration_secs': 0.314875} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.517779] env[63371]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 96098783-edc0-4ce6-866f-bc17a9961aa5/96098783-edc0-4ce6-866f-bc17a9961aa5.vmdk or device None with type streamOptimized {{(pid=63371) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2176.518415] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1674b4bd-8d8f-4067-90fc-7f6f4be7ed28 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.524037] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2176.524037] env[63371]: value = "task-1775249" [ 2176.524037] env[63371]: _type = "Task" [ 2176.524037] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.531333] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775249, 'name': Rename_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.036813] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775249, 'name': Rename_Task, 'duration_secs': 0.164819} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.037205] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Powering on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2177.037559] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-89a594bb-2202-45af-87b4-f0d4e2f40a5b {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.044673] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2177.044673] env[63371]: value = "task-1775250" [ 2177.044673] env[63371]: _type = "Task" [ 2177.044673] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.052950] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775250, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.554705] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775250, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.054657] env[63371]: DEBUG oslo_vmware.api [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775250, 'name': PowerOnVM_Task, 'duration_secs': 0.537086} completed successfully. 
{{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.055135] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Powered on the VM {{(pid=63371) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2178.146193] env[63371]: DEBUG nova.compute.manager [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Checking state {{(pid=63371) _get_power_state /opt/stack/nova/nova/compute/manager.py:1792}} [ 2178.147197] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fbd9299-2f95-4b0b-8d63-b98b93605dd3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.663675] env[63371]: DEBUG oslo_concurrency.lockutils [None req-b2b9550a-757e-4f5a-9753-36c9b550a4d9 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.363s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2201.430566] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.430378] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.430663] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.430782] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63371) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10548}} [ 2204.430566] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2204.430924] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Starting heal instance info cache {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9929}} [ 2204.430924] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Rebuilding the list of instances to heal {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9933}} [ 2205.033866] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2205.034059] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquired lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2205.034212] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Forcefully refreshing network info cache for instance {{(pid=63371) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2205.034369] env[63371]: DEBUG nova.objects.instance [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lazy-loading 'info_cache' on Instance uuid 96098783-edc0-4ce6-866f-bc17a9961aa5 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2206.755316] env[63371]: DEBUG nova.network.neutron [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updating instance_info_cache with network_info: [{"id": "487877ba-9a41-4afc-81e7-07dfbf50f256", "address": "fa:16:3e:f0:41:09", "network": {"id": "36b060a8-6ed3-42c6-aba5-8be7cd4fa2b3", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1737088683-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "da713632f95146f1986c0d8a9e529ca0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap487877ba-9a", "ovs_interfaceid": "487877ba-9a41-4afc-81e7-07dfbf50f256", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63371) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2207.258582] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Releasing lock "refresh_cache-96098783-edc0-4ce6-866f-bc17a9961aa5" {{(pid=63371) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2207.258785] env[63371]: DEBUG nova.compute.manager [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updated the network info_cache for instance {{(pid=63371) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10000}} [ 2207.259021] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager.update_available_resource {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2207.762595] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2207.762946] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2207.762984] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2207.763151] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63371) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2207.764078] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd883e1f-9f10-42a2-82e2-72af510cb349 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.772554] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2960b80-9092-404e-932e-0b3f7d1acff4 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.786167] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc762c9-2bfd-4ec9-8a85-18793149d17c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.792233] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e13259b-d1e8-4dcb-93c1-96492a77739f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.830521] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181196MB free_disk=165GB free_vcpus=48 pci_devices=None {{(pid=63371) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2207.830708] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2207.830927] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2208.854007] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Instance 96098783-edc0-4ce6-866f-bc17a9961aa5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63371) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2208.854266] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2208.854376] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63371) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2208.878454] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0382b330-32c0-4c31-9148-0046969a5c8f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.885712] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94837631-bad4-4ca6-bd64-2139af62859f {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.914488] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b2df84-f197-4bfa-87f5-2d44bbd14b53 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.921083] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f292eec-ceb2-4dcf-ae54-901432927967 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.934430] env[63371]: DEBUG nova.compute.provider_tree [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2209.437094] env[63371]: DEBUG nova.scheduler.client.report [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Inventory has not changed for provider 
c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2209.942868] env[63371]: DEBUG nova.compute.resource_tracker [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63371) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2209.943370] env[63371]: DEBUG oslo_concurrency.lockutils [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.112s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2210.114491] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2210.114728] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2210.114900] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2210.115079] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2215.019630] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "96098783-edc0-4ce6-866f-bc17a9961aa5" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2215.019937] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2215.020160] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock 
"96098783-edc0-4ce6-866f-bc17a9961aa5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2215.020362] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2215.020535] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2215.024224] env[63371]: INFO nova.compute.manager [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Terminating instance [ 2215.025922] env[63371]: DEBUG nova.compute.manager [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Start destroying the instance on the hypervisor. {{(pid=63371) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3151}} [ 2215.026171] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Destroying instance {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2215.027037] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2014c648-c974-4b25-97a1-42df831ab037 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.034897] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Powering off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2215.035146] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2eac88e-b939-479a-bd3e-4cb61ff8c0d3 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.041760] env[63371]: DEBUG oslo_vmware.api [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2215.041760] env[63371]: value = "task-1775251" [ 2215.041760] env[63371]: _type = "Task" [ 2215.041760] env[63371]: } to complete. 
{{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2215.049467] env[63371]: DEBUG oslo_vmware.api [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775251, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.425642] env[63371]: DEBUG oslo_service.periodic_task [None req-e11f2069-9084-420a-80c4-b0f0da601c17 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63371) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2215.551674] env[63371]: DEBUG oslo_vmware.api [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775251, 'name': PowerOffVM_Task, 'duration_secs': 0.166414} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2215.551930] env[63371]: DEBUG nova.virt.vmwareapi.vm_util [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Powered off the VM {{(pid=63371) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2215.552114] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Unregistering the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2215.552361] env[63371]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-315593e8-409d-43af-9841-4760fccdf86c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.633772] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Unregistered the VM {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2215.634030] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Deleting contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2215.634169] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleting the datastore file [datastore1] 96098783-edc0-4ce6-866f-bc17a9961aa5 {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2215.634444] env[63371]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f80ff1d6-a92b-4028-9649-893060455c5f {{(pid=63371) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.640268] env[63371]: DEBUG oslo_vmware.api [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for the task: (returnval){ [ 2215.640268] env[63371]: value = "task-1775253" [ 2215.640268] env[63371]: _type = "Task" [ 2215.640268] env[63371]: } to complete. {{(pid=63371) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2215.647533] env[63371]: DEBUG oslo_vmware.api [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775253, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.150731] env[63371]: DEBUG oslo_vmware.api [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Task: {'id': task-1775253, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12333} completed successfully. {{(pid=63371) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2216.151159] env[63371]: DEBUG nova.virt.vmwareapi.ds_util [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleted the datastore file {{(pid=63371) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2216.151159] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Deleted contents of the VM from datastore datastore1 {{(pid=63371) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2216.151322] env[63371]: DEBUG nova.virt.vmwareapi.vmops [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Instance destroyed {{(pid=63371) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2216.151495] env[63371]: INFO nova.compute.manager [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2216.151741] env[63371]: DEBUG oslo.service.loopingcall [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63371) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2216.151927] env[63371]: DEBUG nova.compute.manager [-] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Deallocating network for instance {{(pid=63371) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2290}} [ 2216.152032] env[63371]: DEBUG nova.network.neutron [-] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] deallocate_for_instance() {{(pid=63371) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2216.591419] env[63371]: DEBUG nova.compute.manager [req-0cea5b84-a744-46fb-9c7c-415d69d66bdd req-a022246c-54ae-4af1-a238-bc7083aa8251 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Received event network-vif-deleted-487877ba-9a41-4afc-81e7-07dfbf50f256 {{(pid=63371) external_instance_event /opt/stack/nova/nova/compute/manager.py:11132}} [ 2216.591590] env[63371]: INFO nova.compute.manager [req-0cea5b84-a744-46fb-9c7c-415d69d66bdd req-a022246c-54ae-4af1-a238-bc7083aa8251 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Neutron deleted interface 487877ba-9a41-4afc-81e7-07dfbf50f256; detaching it from the instance and deleting it from the info cache [ 2216.591764] env[63371]: DEBUG nova.network.neutron [req-0cea5b84-a744-46fb-9c7c-415d69d66bdd req-a022246c-54ae-4af1-a238-bc7083aa8251 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2217.047056] env[63371]: DEBUG nova.network.neutron [-] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Updating instance_info_cache with network_info: [] {{(pid=63371) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2217.094441] env[63371]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e5036fde-760a-43f7-ab39-4d5647e5310c {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.104568] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26316ca-1a72-43ce-98ce-66e6b66c2caa {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2217.128264] env[63371]: DEBUG nova.compute.manager [req-0cea5b84-a744-46fb-9c7c-415d69d66bdd req-a022246c-54ae-4af1-a238-bc7083aa8251 service nova] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Detach interface failed, port_id=487877ba-9a41-4afc-81e7-07dfbf50f256, reason: Instance 96098783-edc0-4ce6-866f-bc17a9961aa5 could not be found. {{(pid=63371) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10966}} [ 2217.549408] env[63371]: INFO nova.compute.manager [-] [instance: 96098783-edc0-4ce6-866f-bc17a9961aa5] Took 1.40 seconds to deallocate network for instance. 
[ 2218.055559] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2218.055829] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2218.056059] env[63371]: DEBUG nova.objects.instance [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lazy-loading 'resources' on Instance uuid 96098783-edc0-4ce6-866f-bc17a9961aa5 {{(pid=63371) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2218.589899] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5900c602-f784-4560-bca3-9fcec232fa43 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.597483] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7b2963-2de4-4116-8132-38d075207f26 {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.625563] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8197a75-394e-44ac-84e3-254bb4e3987a {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.631946] env[63371]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28291eb8-8234-4d97-8a55-bedbadbad00d {{(pid=63371) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.645288] env[63371]: DEBUG nova.compute.provider_tree [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed in ProviderTree for provider: c079ebb1-2fa2-4df9-bdab-118e305653c1 {{(pid=63371) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2219.149051] env[63371]: DEBUG nova.scheduler.client.report [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Inventory has not changed for provider c079ebb1-2fa2-4df9-bdab-118e305653c1 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 165, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63371) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 2219.653550] env[63371]: DEBUG oslo_concurrency.lockutils [None 
req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.598s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2219.674386] env[63371]: INFO nova.scheduler.client.report [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Deleted allocations for instance 96098783-edc0-4ce6-866f-bc17a9961aa5 [ 2220.183961] env[63371]: DEBUG oslo_concurrency.lockutils [None req-54d41b88-aa33-4bee-b446-fc4b9b659376 tempest-AttachVolumeShelveTestJSON-1384598468 tempest-AttachVolumeShelveTestJSON-1384598468-project-member] Lock "96098783-edc0-4ce6-866f-bc17a9961aa5" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.164s {{(pid=63371) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}